hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
be1fe1edfef0a5b0c9144e72f09f8b6e5ed105d5
| 35
|
py
|
Python
|
scripts/portal/back_Ludi.py
|
pantskun/swordiemen
|
fc33ffec168e6611587fdc75de8270f6827a4176
|
[
"MIT"
] | null | null | null |
scripts/portal/back_Ludi.py
|
pantskun/swordiemen
|
fc33ffec168e6611587fdc75de8270f6827a4176
|
[
"MIT"
] | null | null | null |
scripts/portal/back_Ludi.py
|
pantskun/swordiemen
|
fc33ffec168e6611587fdc75de8270f6827a4176
|
[
"MIT"
] | null | null | null |
# 223000000
sm.warp(220000000, 26)
| 11.666667
| 22
| 0.742857
| 5
| 35
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.645161
| 0.114286
| 35
| 2
| 23
| 17.5
| 0.193548
| 0.257143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
07be40ff2039cd64caae6a92cd19e16edbe96de2
| 15,029
|
py
|
Python
|
edo_client/api/package.py
|
easydo-cn/edo_client
|
775f185c54f2eeda6a7dd6482de8228ca9ad89b0
|
[
"Apache-2.0"
] | null | null | null |
edo_client/api/package.py
|
easydo-cn/edo_client
|
775f185c54f2eeda6a7dd6482de8228ca9ad89b0
|
[
"Apache-2.0"
] | null | null | null |
edo_client/api/package.py
|
easydo-cn/edo_client
|
775f185c54f2eeda6a7dd6482de8228ca9ad89b0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import json
from .base import BaseApi
from collections import OrderedDict
class PackageApi(BaseApi):
def list(self, account=None, instance=None, in_memory=None):
""" 取得所有的软件包"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
resp = self._get('/api/v1/package/list', raw=True, account=account, instance=instance, in_memory=json.dumps(in_memory))
return json.loads(resp.text, object_pairs_hook=OrderedDict)
def get(self, package_name, detail=False, account=None, instance=None):
""" 取得软件包的信息"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
resp = self._get('/api/v1/package/get', raw=True, package_name=package_name, detail=json.dumps(detail), account=account, instance=instance)
return json.loads(resp.text, object_pairs_hook=OrderedDict)
def new(self, package_name, info, account=None, instance=None):
""" 取得软件包的信息"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._post('/api/v1/package/new', package_name=package_name, info=json.dumps(info), account=account, instance=instance)
def install(self, package_name, upgrade=True, is_temp=False, account=None, instance=None):
""" 安装软件包"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._post('/api/v1/package/install', package_name=package_name, upgrade=json.dumps(upgrade),
is_temp=json.dumps(is_temp), account=account, instance=instance)
def set(self, package_name, info, account=None, instance=None):
""" 取得软件包的信息"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._post('/api/v1/package/set', package_name=package_name, info=json.dumps(info), account=account, instance=instance)
def remove(self, package_name, account=None, instance=None):
""" 取得软件包的信息"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/remove', package_name=package_name, account=account, instance=instance)
def register_form(self, name, form_def, overwrite=False, account=None, instance=None):
""" 注册表单"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._post('/api/v1/package/register_form', name=name, form_def=json.dumps(form_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
def list_forms(self, package_name, account=None, instance=None):
""" 列出所有表单"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/list_forms', package_name=package_name, account=account, instance=instance)
def get_form(self, name, account=None, instance=None):
""" 取得表单信息"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
resp = self._get('/api/v1/package/get_form', raw=True, name=name, account=account, instance=instance)
return json.loads(resp.text, object_pairs_hook=OrderedDict)
def remove_form(self, name, account=None, instance=None):
""" 删除表单"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/remove_form', name=name, account=account, instance=instance)
def register_script(self, name, script_def, overwrite=False, account=None, instance=None):
""" 注册脚本"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._post('/api/v1/package/register_script', name=name, code_def=json.dumps(script_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
def list_scripts(self, package_name, account=None, instance=None):
""" 列出所有脚本"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/list_scripts', package_name=package_name, account=account, instance=instance)
def get_script(self, name, account=None, instance=None):
""" 取得脚本信息"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
resp = self._get('/api/v1/package/get_script', raw=True, name=name, account=account, instance=instance)
return json.loads(resp.text, object_pairs_hook=OrderedDict)
def remove_script(self, name, account=None, instance=None):
""" 删除脚本"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/remove_script', name=name, account=account, instance=instance)
def register_rule(self, name, rule_def, overwrite=False, account=None, instance=None):
""" 注册规则"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._post('/api/v1/package/register_rule', name=name, rule_def=json.dumps(rule_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
def list_rules(self, package_name, account=None, instance=None):
""" 列出所有规则"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/list_rules', package_name=package_name, account=account, instance=instance)
def get_rule(self, name, account=None, instance=None):
""" 取得规则信息"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
resp = self._get('/api/v1/package/get_rule', raw=True, name=name, account=account, instance=instance)
return json.loads(resp.text, object_pairs_hook=OrderedDict)
def remove_rule(self, name, account=None, instance=None):
""" 删除规则"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/remove_rule', name=name, account=account, instance=instance)
def register_template(self, name, template_def, overwrite=False, account=None, instance=None):
""" 注册模板"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._post('/api/v1/package/register_template', name=name, template_def=json.dumps(template_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
def list_templates(self, package_name, account=None, instance=None):
""" 列出所有模板"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/list_templates', package_name=package_name, account=account, instance=instance)
def get_template(self, name, account=None, instance=None):
""" 取得模板信息"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
resp = self._get('/api/v1/package/get_template', raw=True, name=name, account=account, instance=instance)
return json.loads(resp.text, object_pairs_hook=OrderedDict)
def remove_template(self, name, account=None, instance=None):
""" 删除模板"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/remove_template', name=name, account=account, instance=instance)
def register_mdset(self, name, mdset_def, overwrite=False, account=None, instance=None):
""" 注册属性集"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._post('/api/v1/package/register_mdset', name=name, mdset_def=json.dumps(mdset_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
def list_mdsets(self, package_name, account=None, instance=None):
""" 列出所有属性集"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/list_mdsets', package_name=package_name, account=account, instance=instance)
def get_mdset(self, name, account=None, instance=None):
""" 取得属性集信息"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
resp = self._get('/api/v1/package/get_mdset', raw=True, name=name, account=account, instance=instance)
return json.loads(resp.text, object_pairs_hook=OrderedDict)
def remove_mdset(self, name, account=None, instance=None):
""" 删除属性集"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/remove_mdset', name=name, account=account, instance=instance)
def register_stage(self, name, stage_def, overwrite=False, account=None, instance=None):
""" 注册阶段"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._post('/api/v1/package/register_stage', name=name, stage_def=json.dumps(stage_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
def list_stages(self, package_name, account=None, instance=None):
""" 列出所有阶段"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/list_stages', package_name=package_name, account=account, instance=instance)
def get_stage(self, name, account=None, instance=None):
""" 取得阶段信息"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
resp = self._get('/api/v1/package/get_stage', raw=True, name=name, account=account, instance=instance)
return json.loads(resp.text, object_pairs_hook=OrderedDict)
def remove_stage(self, name, account=None, instance=None):
""" 删除阶段"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/remove_stage', name=name, account=account, instance=instance)
def register_workflow(self, name, workflow_def, overwrite=False, account=None, instance=None):
""" 注册流程"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._post('/api/v1/package/register_workflow', name=name, workflow_def=json.dumps(workflow_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
def list_workflows(self, package_name, account=None, instance=None):
""" 列出所有流程"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/list_workflows', package_name=package_name, account=account, instance=instance)
def get_workflow(self, name, account=None, instance=None):
""" 取得流程信息"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
resp = self._get('/api/v1/package/get_workflow', raw=True, name=name, account=account, instance=instance)
return json.loads(resp.text, object_pairs_hook=OrderedDict)
def remove_workflow(self, name, account=None, instance=None):
""" 删除流程"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/remove_workflow', name=name, account=account, instance=instance)
def register_skin(self, name, skin_def, overwrite=False, account=None, instance=None):
""" 注册皮肤"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._post('/api/v1/package/register_skin', name=name, skin_def=json.dumps(skin_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
def list_skins(self, package_name, account=None, instance=None):
""" 列出所有皮肤"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/list_skins', package_name=package_name, account=account, instance=instance)
def get_skin(self, name, account=None, instance=None):
""" 取得皮肤信息"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
resp = self._get('/api/v1/package/get_skin', raw=True, name=name, account=account, instance=instance)
return json.loads(resp.text, object_pairs_hook=OrderedDict)
def remove_skin(self, name, account=None, instance=None):
""" 删除皮肤"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/remove_skin', name=name, account=account, instance=instance)
def add_resource(self, package_name, res_path, stream, overwrite=False, account=None, instance=None):
""" 注册资源"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._post('/api/v1/package/add_resource', package_name=package_name, res_path=res_path, files={'stream':('resource', stream)}, overwrite=json.dumps(overwrite), account=account, instance=instance)
def list_resources(self, package_name, account=None, instance=None):
""" 列出所有资源"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/list_resources', package_name=package_name, account=account, instance=instance)
def get_resource(self, package_name, res_path='/', account=None, instance=None):
""" 取得资源信息"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
resp = self._get('/api/v1/package/get_resource', raw=True, package_name=package_name, res_path=res_path, account=account, instance=instance)
return resp
def remove_resource(self, package_name, res_path, account=None, instance=None):
""" 删除资源"""
if not account: account = self.account_name
if not instance: instance = self.instance_name
return self._get('/api/v1/package/remove_resource', package_name=package_name, res_path=res_path, account=account, instance=instance)
| 43.944444
| 211
| 0.689866
| 1,943
| 15,029
| 5.17756
| 0.058157
| 0.04175
| 0.079324
| 0.096024
| 0.872266
| 0.865109
| 0.854473
| 0.724254
| 0.690159
| 0.679523
| 0
| 0.003548
| 0.193692
| 15,029
| 341
| 212
| 44.073314
| 0.826622
| 0.019496
| 0
| 0.51087
| 0
| 0
| 0.079403
| 0.073051
| 0
| 0
| 0
| 0
| 0
| 1
| 0.228261
| false
| 0
| 0.016304
| 0
| 0.478261
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
07c1767e12021af3df9ea47c9034f0ecb9f8721a
| 11,725
|
py
|
Python
|
tests/test_data/test_segmenter.py
|
sibange/padertorch
|
494692d877f04c66847c2943795b23aea488217d
|
[
"MIT"
] | 62
|
2019-12-22T08:30:29.000Z
|
2022-03-22T11:02:59.000Z
|
tests/test_data/test_segmenter.py
|
sibange/padertorch
|
494692d877f04c66847c2943795b23aea488217d
|
[
"MIT"
] | 47
|
2020-01-06T09:23:47.000Z
|
2022-01-24T16:55:06.000Z
|
tests/test_data/test_segmenter.py
|
sibange/padertorch
|
494692d877f04c66847c2943795b23aea488217d
|
[
"MIT"
] | 13
|
2019-12-16T08:12:46.000Z
|
2021-11-08T14:37:06.000Z
|
from padertorch.data.segment import Segmenter
import numpy as np
import torch
def test_simple_case():
segmenter = Segmenter(length=32000, include_keys=('x', 'y'),
shift=16000)
ex = {'x': np.arange(65000), 'y': np.arange(65000),
'num_samples': 65000, 'gender': 'm'}
segmented = segmenter(ex)
assert type(segmented) == list, segmented
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in ex.keys()])
np.testing.assert_equal(
entry['x'], np.arange(idx * 16000, 16000 + (idx + 1) * 16000))
np.testing.assert_equal(entry['x'], entry['y'])
def test_fixed_anchor():
segmenter = Segmenter(length=32000, include_keys=('x', 'y'),
shift=16000, anchor=10)
ex = {'x': np.arange(65000), 'y': np.arange(65000),
'num_samples': 65000, 'gender': 'm'}
segmented = segmenter(ex)
assert type(segmented) == list, segmented
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in ex.keys()])
np.testing.assert_equal(
entry['x'], 10 + np.arange(idx * 16000, 16000 + (idx + 1) * 16000))
np.testing.assert_equal(entry['x'], entry['y'])
def test_random_anchor():
"""
Checks fix for random anchor in https://github.com/fgnt/padertorch/pull/91
"""
ex = {'x': np.arange(65000), 'y': np.arange(65000),
'num_samples': 65000, 'gender': 'm'}
segmenter = Segmenter(length=32000, include_keys=('x', 'y'),
shift=32000, anchor='random')
segmented = segmenter(ex)
assert type(segmented) == list, segmented
segmenter = Segmenter(length=32000, include_keys=('x', 'y'),
shift=32000, anchor='random_max_segments')
segmented = segmenter(ex)
assert type(segmented) == list, segmented
assert len(segmented) == 2
def test_copy_keys():
segmenter = Segmenter(length=32000, include_keys=('x', 'y'),
shift=16000, copy_keys='gender')
ex = {'x': np.arange(65000), 'y': np.arange(65000),
'num_samples': 65000, 'gender': 'm'}
segmented = segmenter(ex)
assert type(segmented) == list, segmented
expected_keys = [key for key in ex.keys() if not key == 'num_samples']
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in expected_keys])
np.testing.assert_equal(
entry['x'], np.arange(idx * 16000, 16000 + (idx + 1) * 16000))
np.testing.assert_equal(entry['x'], entry['y'])
def test_include_none():
segmenter = Segmenter(length=32000, shift=16000)
ex = {'x': np.arange(65000), 'y': np.arange(65000),
'num_samples': 65000, 'gender': 'm'}
segmented = segmenter(ex)
assert type(segmented) == list, segmented
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in ex.keys()])
np.testing.assert_equal(
entry['x'], np.arange(idx * 16000, 16000 + (idx + 1) * 16000))
np.testing.assert_equal(entry['x'], entry['y'])
def test_include_to_larger():
segmenter = Segmenter(length=32000, shift=16000,
include_keys=['x', 'y', 'z'])
ex = {'x': np.arange(65000), 'y': np.arange(65000),
'num_samples': 65000, 'gender': 'm'}
error = False
try:
segmenter(ex)
except AssertionError:
error = True
assert error, segmenter
def test_include_none_with_torch():
segmenter = Segmenter(length=32000, shift=16000)
array = np.random.randn(5,10,64000)
ex = {'x': array.copy(), 'y': array.copy(),
'z': torch.tensor(array),
'num_samples': 65000, 'gender': 'm'}
segmented = segmenter(ex)
assert type(segmented) == list, segmented
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in ex.keys()])
np.testing.assert_equal(entry['x'], entry['z'].numpy())
np.testing.assert_equal(entry['x'], entry['y'])
def test_error_include_list():
segmenter = Segmenter(length=32000, shift=16000,
include_keys=['x', 'y', 'z'])
ex = {'x': np.arange(65000), 'y': np.arange(65000),
'z': np.arange(65000).tolist(),
'num_samples': 65000, 'gender': 'm'}
error = False
try:
segmenter(ex)
except ValueError:
error = True
assert error, segmenter
def test_include_none_ignore_list():
segmenter = Segmenter(length=32000, shift=16000)
ex = {'x': np.arange(65000), 'y': np.arange(65000),
'z': np.arange(65000).tolist(),
'num_samples': 65000, 'gender': 'm'}
segmented = segmenter(ex)
assert type(segmented) == list, segmented
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in ex.keys()])
np.testing.assert_equal(
entry['x'], np.arange(idx * 16000, 16000 + (idx + 1) * 16000))
segmenter = Segmenter(length=32000, shift=16000,
copy_keys=['num_samples', 'gender'])
segmented = segmenter(ex)
assert type(segmented) == list, segmented
expected_keys = ['x', 'y', 'num_samples', 'gender']
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in expected_keys])
np.testing.assert_equal(
entry['x'], np.arange(idx * 16000, 16000 + (idx + 1) * 16000))
np.testing.assert_equal(entry['x'], entry['y'])
def test_include_exclude():
segmenter = Segmenter(length=32000, shift=16000, exclude_keys='y')
ex = {'x': np.arange(65000), 'y': np.arange(65000),
'num_samples': 65000, 'gender': 'm'}
segmented = segmenter(ex)
assert type(segmented) == list, segmented
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in ex.keys()])
np.testing.assert_equal(
entry['x'], np.arange(idx * 16000, 16000 + (idx + 1) * 16000))
np.testing.assert_equal(entry['y'], np.arange(65000))
def test_axis():
segmenter = Segmenter(length=32000, shift=16000, include_keys=['x', 'y'],
axis=[-1, 0])
ex = {'x': np.arange(65000), 'y': np.arange(65000)[:, None],
'num_samples': 65000, 'gender': 'm'}
segmented = segmenter(ex)
assert type(segmented) == list, segmented
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in ex.keys()])
np.testing.assert_equal(
entry['x'], np.arange(idx * 16000, 16000 + (idx + 1) * 16000))
np.testing.assert_equal(entry['x'], entry['y'][:, 0])
segmenter = Segmenter(length=32000, shift=16000,
include_keys=['x', 'y', 'z'],
axis={'x': 0, 'y': 1, 'z': -1})
array = np.random.randn(65000, 5, 10)
ex = {'x': array.copy(), 'y': array.copy().transpose(1,0,2),
'z': torch.tensor(array.transpose(1,2,0)),
'num_samples': 65000, 'gender': 'm'}
segmented = segmenter(ex)
assert type(segmented) == list, segmented
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in ex.keys()])
np.testing.assert_equal(entry['x'], entry['z'].numpy().transpose(2,0,1))
np.testing.assert_equal(entry['x'], entry['y'].transpose(1,0,2))
def test_axis_dict():
segmenter = Segmenter(length=32000, shift=16000, include_keys=['x', 'y'],
axis={'x': -1, 'y': 0})
ex = {'x': np.arange(65000), 'y': np.arange(65000)[:, None],
'num_samples': 65000, 'gender': 'm'}
segmented = segmenter(ex)
assert type(segmented) == list, segmented
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in ex.keys()])
np.testing.assert_equal(
entry['x'], np.arange(idx * 16000, 16000 + (idx + 1) * 16000))
np.testing.assert_equal(entry['x'], entry['y'][:, 0])
def test_axis_dict_wildcard():
segmenter = Segmenter(length=32000, shift=16000,
include_keys=['audio_data'],
axis={'audio_data': -1})
ex = {'audio_data': {'x': np.arange(65000), 'y': np.arange(65000)},
'z': np.arange(65000),
'num_samples': 65000, 'gender': 'm'}
segmented = segmenter(ex)
assert type(segmented) == list, segmented
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in ex.keys()])
np.testing.assert_equal(
entry['audio_data']['x'],
np.arange(idx * 16000, 16000 + (idx + 1) * 16000)
)
np.testing.assert_equal(entry['audio_data']['x'],
entry['audio_data']['y'])
np.testing.assert_equal(entry['z'],
np.arange(65000))
def test_wildcard():
segmenter = Segmenter(length=32000, shift=16000,
include_keys=['audio_data'])
ex = {'audio_data': {'x': np.arange(65000), 'y': np.arange(65000)},
'num_samples': 65000, 'gender': 'm'}
segmented = segmenter(ex)
assert type(segmented) == list, segmented
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in ex.keys()])
np.testing.assert_equal(
entry['audio_data']['x'], np.arange(
idx * 16000, 16000 + (idx + 1) * 16000)
)
np.testing.assert_equal(entry['audio_data']['x'],
entry['audio_data']['y'])
def test_wildcard_exclude():
ex = {
'audio_data': {'x': np.arange(65000), 'y': np.arange(65000)[:, None]},
'z': np.arange(65000)[:, None],
'num_samples': 65000, 'gender': 'm'
}
segmenter = Segmenter(length=32000, shift=16000,
include_keys=['audio_data'],
exclude_keys=['audio_data.y'],
axis={'audio_data': -1})
segmented = segmenter(ex)
assert type(segmented) == list, segmented
for idx, entry in enumerate(segmented):
assert all([key in entry.keys() for key in ex.keys()])
np.testing.assert_equal(
entry['audio_data']['x'],
np.arange(idx * 16000, 16000 + (idx + 1) * 16000))
np.testing.assert_equal(entry['audio_data']['y'],
np.arange(65000)[:, None])
def test_length_mode():
examples = [{'x': np.arange(16000), 'y': np.arange(16000),
'num_samples': 16000, 'gender': 'm'},
{'x': np.arange(15900), 'y': np.arange(15900),
'num_samples': 15900, 'gender': 'm'}]
new_length = [{'constant': 950, 'max': 942, 'min': 1000},
{'constant': 950, 'max': 936, 'min': 994}]
for mode in ['constant', 'max', 'min']:
for idx, ex in enumerate(examples):
segmenter = Segmenter(length=950, include_keys=('x'),
mode=mode, padding=True)
segmented = segmenter(ex)
np.testing.assert_equal(segmented[0]['x'],
np.arange(0, new_length[idx][mode]))
new_length = [{'constant': 950, 'max': 947, 'min': 951},
{'constant': 950, 'max': 950, 'min': 954}]
for mode in ['constant', 'max', 'min']:
for idx, ex in enumerate(examples):
segmenter = Segmenter(length=950, shift=250, include_keys=('x'),
mode=mode, padding=True)
segmented = segmenter(ex)
np.testing.assert_equal(segmented[0]['x'],
np.arange(0, new_length[idx][mode]))
| 41.285211
| 80
| 0.56435
| 1,462
| 11,725
| 4.434337
| 0.071819
| 0.065402
| 0.070184
| 0.09255
| 0.871356
| 0.849298
| 0.831251
| 0.82323
| 0.802252
| 0.78112
| 0
| 0.087752
| 0.268145
| 11,725
| 283
| 81
| 41.431095
| 0.667754
| 0.006311
| 0
| 0.695473
| 0
| 0
| 0.066523
| 0
| 0
| 0
| 0
| 0
| 0.263374
| 1
| 0.065844
| false
| 0
| 0.012346
| 0
| 0.078189
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ed1ee38906cf72dab813e83c9cdf501be04cace4
| 111
|
py
|
Python
|
skfda/ml/regression/__init__.py
|
jdtuck/scikit-fda
|
28259dffbc45dfc8dbf3c12839b928f9df200351
|
[
"BSD-3-Clause"
] | 1
|
2020-06-27T22:25:49.000Z
|
2020-06-27T22:25:49.000Z
|
skfda/ml/regression/__init__.py
|
jdtuck/scikit-fda
|
28259dffbc45dfc8dbf3c12839b928f9df200351
|
[
"BSD-3-Clause"
] | null | null | null |
skfda/ml/regression/__init__.py
|
jdtuck/scikit-fda
|
28259dffbc45dfc8dbf3c12839b928f9df200351
|
[
"BSD-3-Clause"
] | null | null | null |
from ..._neighbors import KNeighborsRegressor, RadiusNeighborsRegressor
from .linear import LinearRegression
| 22.2
| 71
| 0.855856
| 9
| 111
| 10.444444
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099099
| 111
| 4
| 72
| 27.75
| 0.94
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ed64bff4b5d7dcd4f3eefb3a81710c32bd79ebfd
| 136
|
py
|
Python
|
proxy/subject.py
|
rlelito/DesignPatterns
|
4e59442a10c1407ed4d9cdceea790263c30223b3
|
[
"MIT"
] | null | null | null |
proxy/subject.py
|
rlelito/DesignPatterns
|
4e59442a10c1407ed4d9cdceea790263c30223b3
|
[
"MIT"
] | null | null | null |
proxy/subject.py
|
rlelito/DesignPatterns
|
4e59442a10c1407ed4d9cdceea790263c30223b3
|
[
"MIT"
] | null | null | null |
from abc import ABC
from abc import abstractmethod
class Subject(ABC):
    """Abstract subject: declares the single operation implementations provide."""

    @abstractmethod
    def request(self) -> str:
        """Carry out the operation and return its textual result."""
        ...
| 15.111111
| 30
| 0.683824
| 17
| 136
| 5.470588
| 0.647059
| 0.150538
| 0.27957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 136
| 8
| 31
| 17
| 0.911765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0.166667
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
ed6730c21c949aed6758f46635596d4bbb3a87d1
| 4,633
|
py
|
Python
|
tests/commands/test_rdmrecords_identifiers.py
|
rekt-hard/repository-cli
|
a1247435be1e2bb6b68f940a4aca98d311dc780e
|
[
"MIT"
] | null | null | null |
tests/commands/test_rdmrecords_identifiers.py
|
rekt-hard/repository-cli
|
a1247435be1e2bb6b68f940a4aca98d311dc780e
|
[
"MIT"
] | 12
|
2021-03-15T13:23:01.000Z
|
2021-06-11T09:21:24.000Z
|
tests/commands/test_rdmrecords_identifiers.py
|
rekt-hard/repository-cli
|
a1247435be1e2bb6b68f940a4aca98d311dc780e
|
[
"MIT"
] | 2
|
2021-03-17T16:27:34.000Z
|
2021-05-20T06:33:16.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 Graz University of Technology.
#
# repository-cli is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Pytest configuration.
See https://pytest-invenio.readthedocs.io/ for documentation on which test
fixtures are available.
"""
import json
import pytest
from flask import Flask
from flask_babelex import Babel
from repository_cli import RepositoryCli
from repository_cli.cli.records import (add_identifier, list_identifiers,
replace_identifier)
def test_list_identifiers(app_initialized, create_record):
    """Listing identifiers of an existing record prints scheme and identifier."""
    cli = app_initialized.test_cli_runner()
    record_id = create_record.id
    result = cli.invoke(list_identifiers, ["--pid", record_id])
    assert result.exit_code == 0
    for expected in ("scheme", "identifier"):
        assert expected in result.output
def test_list_identifiers_record_not_found(app_initialized):
    """Listing identifiers of an unknown pid reports a helpful message."""
    cli = app_initialized.test_cli_runner()
    record_id = "this does not exist"
    result = cli.invoke(list_identifiers, ["--pid", record_id])
    assert result.exit_code == 0
    assert "does not exist or is deleted" in result.output
def test_add_identifier(app_initialized, identifier, create_record):
    """Adding a valid identifier to an existing record succeeds."""
    cli = app_initialized.test_cli_runner()
    record_id = create_record.id
    result = cli.invoke(
        add_identifier,
        ["--pid", record_id, "--identifier", json.dumps(identifier)],
    )
    assert result.exit_code == 0
    assert f"Identifier for '{record_id}' added" in result.output
def test_add_identifier_scheme_exists(
    app_initialized, identifier, create_record
):
    """Adding the same identifier twice reports the duplicate scheme."""
    cli = app_initialized.test_cli_runner()
    record_id = create_record.id
    payload = json.dumps(identifier)

    # First addition succeeds.
    result = cli.invoke(
        add_identifier, ["--pid", record_id, "--identifier", payload]
    )
    assert result.exit_code == 0
    assert f"Identifier for '{record_id}' added" in result.output

    # Second addition with the same scheme is rejected with a message.
    result = cli.invoke(
        add_identifier, ["--pid", record_id, "--identifier", payload]
    )
    assert result.exit_code == 0
    expected = f"scheme '{identifier['scheme']}' already in identifiers"
    assert expected in result.output
def test_add_identifier_wrong_identifier_type(app_initialized, create_record):
    """A non-JSON identifier value is rejected with an error message."""
    cli = app_initialized.test_cli_runner()
    record_id = create_record.id
    result = cli.invoke(
        add_identifier,
        ["--pid", record_id, "--identifier", "this is not a dict"],
    )
    assert result.exit_code == 0
    assert "identifier is not valid JSON" in result.output
def test_add_identifiers_record_not_found(app_initialized, identifier):
    """Adding an identifier to an unknown pid reports a helpful message."""
    cli = app_initialized.test_cli_runner()
    record_id = "this does not exist"
    result = cli.invoke(
        add_identifier,
        ["--pid", record_id, "--identifier", json.dumps(identifier)],
    )
    assert result.exit_code == 0
    assert "does not exist or is deleted" in result.output
def test_replace_identifier(app_initialized, create_record):
    """Replacing an identifier whose scheme already exists succeeds."""
    cli = app_initialized.test_cli_runner()
    record_id = create_record.id
    existing = create_record["metadata"]["identifiers"][0]
    result = cli.invoke(
        replace_identifier,
        ["--pid", record_id, "--identifier", json.dumps(existing)],
    )
    assert result.exit_code == 0
    assert f"Identifier for '{record_id}' replaced" in result.output
def test_replace_identifier_scheme_does_not_exist(
    app_initialized, identifier, create_record
):
    """Replacing an identifier with an absent scheme reports an error."""
    cli = app_initialized.test_cli_runner()
    record_id = create_record.id
    result = cli.invoke(
        replace_identifier,
        ["--pid", record_id, "--identifier", json.dumps(identifier)],
    )
    assert result.exit_code == 0
    expected = f"scheme '{identifier['scheme']}' not in identifiers"
    assert expected in result.output
def test_replace_identifier_wrong_identifier_type(
    app_initialized, create_record
):
    """A non-JSON identifier value is rejected with an error message."""
    cli = app_initialized.test_cli_runner()
    record_id = create_record.id
    result = cli.invoke(
        replace_identifier,
        ["--pid", record_id, "--identifier", "this is not a dict"],
    )
    assert result.exit_code == 0
    assert "identifier is not valid JSON" in result.output
def test_replace_identifiers_record_not_found(app_initialized, identifier):
    """Replacing an identifier on an unknown pid reports a helpful message."""
    cli = app_initialized.test_cli_runner()
    record_id = "this does not exist"
    result = cli.invoke(
        replace_identifier,
        ["--pid", record_id, "--identifier", json.dumps(identifier)],
    )
    assert result.exit_code == 0
    assert "does not exist or is deleted" in result.output
| 32.626761
| 79
| 0.703648
| 596
| 4,633
| 5.218121
| 0.159396
| 0.023151
| 0.061736
| 0.077814
| 0.80418
| 0.796785
| 0.781029
| 0.725723
| 0.725723
| 0.725723
| 0
| 0.004552
| 0.193827
| 4,633
| 141
| 80
| 32.858156
| 0.828112
| 0.073171
| 0
| 0.641509
| 0
| 0
| 0.14596
| 0.01121
| 0
| 0
| 0
| 0
| 0.216981
| 1
| 0.09434
| false
| 0
| 0.056604
| 0
| 0.150943
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ed6f6ee65bfe8b52be86a872e0d2c9912cb31081
| 128
|
py
|
Python
|
graphgallery/functional/network/__init__.py
|
Aria461863631/GraphGallery
|
7b62f80ab36b29013bea2538a6581fc696a80201
|
[
"MIT"
] | null | null | null |
graphgallery/functional/network/__init__.py
|
Aria461863631/GraphGallery
|
7b62f80ab36b29013bea2538a6581fc696a80201
|
[
"MIT"
] | null | null | null |
graphgallery/functional/network/__init__.py
|
Aria461863631/GraphGallery
|
7b62f80ab36b29013bea2538a6581fc696a80201
|
[
"MIT"
] | null | null | null |
from .ego import ego_graph
from .degree import *
from .property import *
from .classic import *
from .to_networkx import *
| 21.333333
| 27
| 0.734375
| 18
| 128
| 5.111111
| 0.5
| 0.326087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195313
| 128
| 5
| 28
| 25.6
| 0.893204
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ed88d0d4fc55f202c7800b44298f1e02c9490349
| 110,664
|
py
|
Python
|
coremltools/test/neural_network/test_keras2_numeric.py
|
tonybove-apple/coremltools
|
22a8877beec7bad136ba5612d5aacd8e323ecdfc
|
[
"BSD-3-Clause"
] | 2,740
|
2017-10-03T23:19:01.000Z
|
2022-03-30T15:16:39.000Z
|
coremltools/test/neural_network/test_keras2_numeric.py
|
tonybove-apple/coremltools
|
22a8877beec7bad136ba5612d5aacd8e323ecdfc
|
[
"BSD-3-Clause"
] | 1,057
|
2017-10-05T22:47:01.000Z
|
2022-03-31T23:51:15.000Z
|
coremltools/test/neural_network/test_keras2_numeric.py
|
tonybove-apple/coremltools
|
22a8877beec7bad136ba5612d5aacd8e323ecdfc
|
[
"BSD-3-Clause"
] | 510
|
2017-10-04T19:22:28.000Z
|
2022-03-31T12:16:52.000Z
|
import itertools
import os
import shutil
import tempfile
import unittest
import numpy as np
import pytest
from coremltools._deps import _HAS_KERAS2_TF
from coremltools.models import _MLMODEL_FULL_PRECISION, _MLMODEL_HALF_PRECISION
from coremltools.models.utils import _macos_version, _is_macos
if _HAS_KERAS2_TF:
import keras.backend
from keras.models import Sequential, Model
from keras.layers import (
Dense,
Activation,
Conv2D,
Conv1D,
Flatten,
BatchNormalization,
Conv2DTranspose,
SeparableConv2D,
)
from keras.layers import (
MaxPooling2D,
AveragePooling2D,
GlobalAveragePooling2D,
GlobalMaxPooling2D,
)
from keras.layers import (
MaxPooling1D,
AveragePooling1D,
GlobalAveragePooling1D,
GlobalMaxPooling1D,
)
from keras.layers import Embedding, Input, Permute, Reshape, RepeatVector, Dropout
from keras.layers import Add, Concatenate
from keras.layers import add, multiply, concatenate, dot, maximum, average
from keras.layers import ZeroPadding2D, UpSampling2D, Cropping2D
from keras.layers import ZeroPadding1D, UpSampling1D, Cropping1D
from keras.layers import SimpleRNN, LSTM, GRU
from keras.layers.core import SpatialDropout2D
from keras.layers.wrappers import Bidirectional, TimeDistributed
from distutils.version import StrictVersion as _StrictVersion
if keras.__version__ >= _StrictVersion("2.2.1"):
from keras.layers import DepthwiseConv2D, ReLU
elif keras.__version__ >= _StrictVersion("2.2.0"):
from keras.layers import DepthwiseConv2D
from keras_applications.mobilenet import relu6
else:
from keras.applications.mobilenet import DepthwiseConv2D, relu6
def _keras_transpose(x, is_sequence=False):
if len(x.shape) == 5:
# Keras input shape = [Batch, Seq, Height, Width, Channels]
x = np.transpose(x, [1, 0, 4, 2, 3])
if len(x.shape) == 4:
# Keras input shape = [Batch, Height, Width, Channels]
x = np.transpose(x, [0, 3, 1, 2])
return np.expand_dims(x, axis=0)
elif len(x.shape) == 3:
# Keras input shape = [Batch, (Sequence) Length, Channels]
return np.transpose(x, [1, 0, 2])
elif len(x.shape) == 2:
if is_sequence: # (N,S) --> (S,N,1,)
return x.reshape(x.shape[::-1] + (1,))
else: # (N,C) --> (N,C,1,1)
return x.reshape((1,) + x.shape) # Dense
elif len(x.shape) == 1:
if is_sequence: # (S) --> (S,N,1,1,1)
return x.reshape((x.shape[0], 1, 1))
else:
return x
else:
return x
def _get_coreml_model(
    model,
    input_names=["data"],
    output_names=["output"],
    input_name_shape_dict={},
    model_precision=_MLMODEL_FULL_PRECISION,
    use_float_arraytype=False,
):
    """
    Get the coreml model from the Keras model.
    """
    # Convert the model using the Keras converter (imported locally).
    from coremltools.converters import keras as keras_converter

    converted = keras_converter.convert(
        model,
        input_names,
        output_names,
        input_name_shape_dict=input_name_shape_dict,
        model_precision=model_precision,
        use_float_arraytype=use_float_arraytype,
    )
    return converted
def _generate_data(input_shape, mode="random"):
"""
Generate some random data according to a shape.
"""
if mode == "zeros":
X = np.zeros(input_shape)
elif mode == "ones":
X = np.ones(input_shape)
elif mode == "linear":
X = np.array(range(np.product(input_shape))).reshape(input_shape)
elif mode == "random":
X = np.random.rand(*input_shape)
elif mode == "random_zero_mean":
X = np.random.rand(*input_shape) - 0.5
return X
@unittest.skipIf(not _HAS_KERAS2_TF, "Missing keras. Skipping tests.")
@pytest.mark.keras2
class KerasNumericCorrectnessTest(unittest.TestCase):
    """
    Unit test class for testing the Keras converter.
    """
    def runTest(self):
        # Intentionally empty: this base class only provides helpers for
        # subclasses; defining runTest lets it be instantiated directly.
        pass
    def _get_coreml_model_params_and_test_input(
        self, model, mode, one_dim_seq_flags, input_name_shape_dict={}
    ):
        """Build converter arguments and synthetic test data for ``model``.

        Returns ``(input_names, output_names, input_data, coreml_input)``:
        ``input_data`` holds Keras-ordered arrays (a list for multi-input
        models, a single array otherwise) and ``coreml_input`` maps each
        feature name to a float32, Core ML-ordered copy of the same data.

        NOTE(review): ``input_name_shape_dict`` is a mutable default
        argument; it is only read in this body, so sharing is harmless.
        """
        # Generate data
        nb_inputs = len(model.inputs)
        if nb_inputs > 1:
            # Multi-input model: one synthetic array per input, named
            # "data_0", "data_1", ... in input order.
            input_names = []
            input_data = []
            coreml_input = {}
            for i in range(nb_inputs):
                feature_name = "data_%s" % i
                input_names.append(feature_name)
                # Prefer an explicitly supplied shape; otherwise take the
                # model's input shape with None (variable) dims set to 1.
                if feature_name in input_name_shape_dict:
                    input_shape = [
                        1 if a is None else a
                        for a in input_name_shape_dict[feature_name]
                    ]
                else:
                    input_shape = [1 if a is None else a for a in model.input_shape[i]]
                X = _generate_data(input_shape, mode)
                input_data.append(X)
                if one_dim_seq_flags is None:
                    coreml_input[feature_name] = _keras_transpose(X).astype("f").copy()
                else:
                    coreml_input[feature_name] = (
                        _keras_transpose(X, one_dim_seq_flags[i]).astype("f").copy()
                    )
        else:
            # Single-input model: the lone feature is always called "data".
            input_names = ["data"]
            if "data" in input_name_shape_dict:
                input_shape = [
                    1 if a is None else a for a in input_name_shape_dict["data"]
                ]
            else:
                input_shape = [1 if a is None else a for a in model.input_shape]
            input_data = _generate_data(input_shape, mode)
            if one_dim_seq_flags is None:
                coreml_input = {"data": _keras_transpose(input_data).astype("f").copy()}
            else:
                coreml_input = {
                    "data": _keras_transpose(input_data, one_dim_seq_flags[0])
                    .astype("f")
                    .copy()
                }
        # One output name per Keras output: "output0", "output1", ...
        output_names = ["output" + str(i) for i in range(len(model.outputs))]
        return input_names, output_names, input_data, coreml_input
    def _test_model(
        self,
        model,
        input_name_shape_dict={},
        num_samples=1,
        mode="random",
        delta=1e-2,
        model_dir=None,
        transpose_keras_result=True,
        one_dim_seq_flags=None,
        model_precision=_MLMODEL_FULL_PRECISION,
    ):
        """Convert ``model`` to Core ML and compare predictions numerically.

        The numeric comparison only runs on macOS >= 10.13; elsewhere the
        conversion itself is still exercised. ``num_samples`` is accepted
        but never read in this body. A temporary model directory is
        removed in the ``finally`` block when this method created it.
        """
        # transpose_keras_result: if true, compare the transposed Keras result
        # one_dim_seq_flags: a list of same length as the number of inputs in
        # the model; if None, treat all 1D input (if any) as non-sequence
        # if one_dim_seq_flags[i] is True, it means the ith input, with shape
        # (X,) is in fact a sequence of length X.
        # Get the CoreML model
        use_tmp_folder = False
        if model_dir is None:
            use_tmp_folder = True
            model_dir = tempfile.mkdtemp()
        (
            input_names,
            output_names,
            input_data,
            coreml_input,
        ) = self._get_coreml_model_params_and_test_input(
            model, mode, one_dim_seq_flags, input_name_shape_dict
        )
        coreml_model = _get_coreml_model(
            model,
            input_names,
            output_names,
            input_name_shape_dict,
            model_precision=model_precision,
        )
        try:
            if not (_is_macos() and _macos_version() >= (10, 13)):
                return
            # Assuming coreml model output names are in the same order as
            # Keras output list, put predictions into a list, sorted by output
            # name
            coreml_preds = coreml_model.predict(coreml_input)
            c_preds = [coreml_preds[name] for name in output_names]
            # Get Keras predictions
            keras_preds = model.predict(input_data)
            k_preds = keras_preds if type(keras_preds) is list else [keras_preds]
            # Compare each output blob
            for idx, k_pred in enumerate(k_preds):
                if transpose_keras_result:
                    kp = _keras_transpose(k_pred).flatten()
                else:
                    kp = k_pred.flatten()
                cp = c_preds[idx].flatten()
                # Compare predictions
                self.assertEqual(len(kp), len(cp))
                for i in range(len(kp)):
                    # Normalize by the larger magnitude (at least 1.0) so
                    # the delta check is effectively relative for big values.
                    max_den = max(1.0, kp[i], cp[i])
                    self.assertAlmostEqual(
                        kp[i] / max_den, cp[i] / max_den, delta=delta
                    )
        finally:
            # Cleanup files - models on disk no longer useful
            if use_tmp_folder and os.path.exists(model_dir):
                shutil.rmtree(model_dir)
@unittest.skipIf(not _HAS_KERAS2_TF, "Missing keras. Skipping tests.")
@pytest.mark.keras2
class KerasBasicNumericCorrectnessTest(KerasNumericCorrectnessTest):
def test_tiny_inner_product(self, model_precision=_MLMODEL_FULL_PRECISION):
np.random.seed(1988)
# Define a model
model = Sequential()
model.add(Dense(2, input_shape=(2,)))
# Test all zeros
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model, mode="zeros", model_precision=model_precision)
# Test all ones
model.set_weights([np.ones(w.shape) for w in model.get_weights()])
self._test_model(model, mode="ones", model_precision=model_precision)
# Test random
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
self._test_model(model, model_precision=model_precision)
def test_tiny_inner_product_half_precision(self):
self.test_tiny_inner_product(model_precision=_MLMODEL_HALF_PRECISION)
def test_inner_product_random(self, model_precision=_MLMODEL_FULL_PRECISION):
np.random.seed(1988)
# Define a model
model = Sequential()
model.add(Dense(1000, input_shape=(100,)))
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model, model_precision=model_precision)
def test_inner_product_half_precision_random(self):
self.test_inner_product_random(model_precision=_MLMODEL_HALF_PRECISION)
def test_dense_softmax(self):
np.random.seed(1988)
# Define a model
model = Sequential()
model.add(Dense(32, input_shape=(32,), activation="softmax"))
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_dense_elu(self):
np.random.seed(1988)
# Define a model
model = Sequential()
model.add(Dense(32, input_shape=(32,), activation="elu"))
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_dense_selu(self):
np.random.seed(1988)
# Define a model
model = Sequential()
model.add(Dense(32, input_shape=(32,), activation="selu"))
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_housenet_random(self):
np.random.seed(1988)
num_hidden = 2
num_features = 3
# Define a model
model = Sequential()
model.add(Dense(num_hidden, input_dim=num_features))
model.add(Activation("relu"))
model.add(Dense(1, input_dim=num_features))
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_conv_ones(self, model_precision=_MLMODEL_FULL_PRECISION):
np.random.seed(1988)
input_dim = 10
input_shape = (input_dim, input_dim, 1)
num_kernels, kernel_height, kernel_width = 3, 5, 5
# Define a model
model = Sequential()
model.add(
Conv2D(
input_shape=input_shape,
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
)
)
# Set some random weights
model.set_weights([np.ones(w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model, model_precision=model_precision)
def test_tiny_conv_ones_half_precision(self):
self.test_tiny_conv_ones(model_precision=_MLMODEL_HALF_PRECISION)
def test_tiny_conv_random(self, model_precision=_MLMODEL_FULL_PRECISION):
np.random.seed(1988)
input_dim = 10
input_shape = (input_dim, input_dim, 1)
num_kernels, kernel_height, kernel_width = 3, 5, 5
# Define a model
model = Sequential()
model.add(
Conv2D(
input_shape=input_shape,
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model, model_precision=model_precision)
@unittest.skipUnless(
_is_macos() and _macos_version() >= (10, 14), "Only supported on MacOS 10.14+"
)
def test_tiny_conv_random_input_shape_dict(
self, model_precision=_MLMODEL_FULL_PRECISION
):
np.random.seed(1988)
H, W, C = 10, 20, 5
input_shape = (None, H, W, C)
num_kernels, kernel_height, kernel_width = 3, 5, 5
# Define a model
model = Sequential()
model.add(
Conv2D(
input_shape=(None, None, C),
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(
model,
input_name_shape_dict={"data": input_shape},
model_precision=model_precision,
)
def test_tiny_conv_random_half_precision(self):
self.test_tiny_conv_random(model_precision=_MLMODEL_HALF_PRECISION)
def test_tiny_conv_dilated(self, model_precision=_MLMODEL_FULL_PRECISION):
np.random.seed(1988)
input_dim = 10
input_shape = (input_dim, input_dim, 1)
num_kernels, kernel_height, kernel_width = 3, 5, 5
# Define a model
model = Sequential()
model.add(
Conv2D(
input_shape=input_shape,
dilation_rate=(2, 2),
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model, model_precision=model_precision)
def test_tiny_conv_dilated_half_precision(self):
return self.test_tiny_conv_dilated(model_precision=_MLMODEL_HALF_PRECISION)
def test_tiny_conv_dilated_rect_random(
self, model_precision=_MLMODEL_FULL_PRECISION
):
np.random.seed(1988)
input_shape = (32, 20, 3)
num_kernels = 2
kernel_height = 3
kernel_width = 3
# Define a model
model = Sequential()
model.add(
Conv2D(
input_shape=input_shape,
dilation_rate=(2, 2),
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model, model_precision=model_precision)
def test_tiny_conv_dilated_rect_random_half_precision(self):
return self.test_tiny_conv_dilated_rect_random(
model_precision=_MLMODEL_HALF_PRECISION
)
def test_tiny_conv_pseudo_1d_x(self, model_precision=_MLMODEL_FULL_PRECISION):
np.random.seed(1988)
input_dim = 2
input_length = 5
filter_length = 1 # 3
nb_filters = 1
# Define a model
model = Sequential()
model.add(
Conv2D(
nb_filters,
kernel_size=(1, filter_length),
input_shape=(1, input_length, input_dim),
padding="valid",
)
)
# Set some random weights
model.set_weights([np.ones(w.shape) for w in model.get_weights()])
self._test_model(model, mode="linear", model_precision=model_precision)
def test_tiny_conv_pseudo_1d_x_half_precision(self):
return self.test_tiny_conv_pseudo_1d_x(model_precision=_MLMODEL_HALF_PRECISION)
def test_tiny_conv1d_same_random(self):
np.random.seed(1988)
input_dim = 2
input_length = 10
filter_length = 3
nb_filters = 4
model = Sequential()
model.add(
Conv1D(
nb_filters,
kernel_size=filter_length,
padding="same",
input_shape=(input_length, input_dim),
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_conv1d_same_random_input_shape_dict(self):
np.random.seed(1988)
input_dim = 2
input_length = 10
filter_length = 3
nb_filters = 4
model = Sequential()
model.add(
Conv1D(
nb_filters,
kernel_size=filter_length,
padding="same",
input_shape=(None, input_dim),
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(
model, input_name_shape_dict={"data": (None, input_length, input_dim)}
)
def test_large_input_length_conv1d_same_random(
self, model_precision=_MLMODEL_FULL_PRECISION
):
np.random.seed(1988)
input_dim = 2
input_length = 80
filter_length = 3
nb_filters = 4
model = Sequential()
model.add(
Conv1D(
nb_filters,
kernel_size=filter_length,
padding="same",
input_shape=(input_length, input_dim),
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model, model_precision=model_precision)
def test_large_input_length_conv1d_same_random_half_precision(self):
return self.test_large_input_length_conv1d_same_random(
model_precision=_MLMODEL_HALF_PRECISION
)
def test_tiny_conv1d_valid_random(self):
np.random.seed(1988)
input_dim = 2
input_length = 10
filter_length = 3
nb_filters = 4
model = Sequential()
model.add(
Conv1D(
nb_filters,
kernel_size=filter_length,
padding="valid",
input_shape=(input_length, input_dim),
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_conv1d_dilated_random(self):
np.random.seed(1988)
input_shape = (20, 1)
num_kernels = 2
filter_length = 3
# Define a model
model = Sequential()
model.add(
Conv1D(
num_kernels,
kernel_size=filter_length,
padding="valid",
input_shape=input_shape,
dilation_rate=3,
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_conv_rect_kernel_x(self):
np.random.seed(1988)
input_dim = 10
input_shape = (input_dim, input_dim, 1)
num_kernels = 3
kernel_height = 1
kernel_width = 5
# Define a model
model = Sequential()
model.add(
Conv2D(
input_shape=input_shape,
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
padding="same",
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_conv_rect_kernel_y(self):
np.random.seed(1988)
input_dim = 10
input_shape = (input_dim, input_dim, 1)
num_kernels = 3
kernel_height = 5
kernel_width = 1
# Define a model
model = Sequential()
model.add(
Conv2D(
input_shape=input_shape,
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
padding="valid",
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_conv_rect_kernel_xy(self, model_precision=_MLMODEL_FULL_PRECISION):
np.random.seed(1988)
input_dim = 10
input_shape = (input_dim, input_dim, 1)
num_kernels = 3
kernel_height = 5
kernel_width = 3
# Define a model
model = Sequential()
model.add(
Conv2D(
input_shape=input_shape,
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
padding="valid",
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model, model_precision=model_precision)
def test_tiny_conv_rect_kernel_xy_half_precision(self):
self.test_tiny_conv_rect_kernel_xy(model_precision=_MLMODEL_HALF_PRECISION)
def test_flatten(self):
model = Sequential()
model.add(Flatten(input_shape=(2, 2, 2)))
self._test_model(model, mode="linear")
def test_conv_dense(self, model_precision=_MLMODEL_FULL_PRECISION):
input_shape = (48, 48, 3)
model = Sequential()
model.add(Conv2D(32, (3, 3), activation="relu", input_shape=input_shape))
model.add(Flatten())
model.add(Dense(10, activation="softmax"))
# Get the coreml model
self._test_model(model, model_precision=model_precision)
def test_conv_dense_half_precision(self):
return self.test_conv_dense(model_precision=_MLMODEL_HALF_PRECISION)
def test_conv_batchnorm_random(self, model_precision=_MLMODEL_FULL_PRECISION):
np.random.seed(1988)
input_dim = 10
input_shape = (input_dim, input_dim, 3)
num_kernels = 3
kernel_height = 5
kernel_width = 5
# Define a model
model = Sequential()
model.add(
Conv2D(
input_shape=input_shape,
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
)
)
model.add(BatchNormalization(epsilon=1e-5))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Get the coreml model
self._test_model(model, model_precision=model_precision)
def test_conv_batchnorm_random_half_precision(self):
return self.test_conv_batchnorm_random(model_precision=_MLMODEL_HALF_PRECISION)
def test_conv_batchnorm_no_gamma_no_beta(
self, model_precision=_MLMODEL_FULL_PRECISION
):
np.random.seed(1988)
input_dim = 10
input_shape = (input_dim, input_dim, 3)
num_kernels = 3
kernel_height = 5
kernel_width = 5
# Define a model
model = Sequential()
model.add(
Conv2D(
input_shape=input_shape,
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
)
)
model.add(BatchNormalization(center=False, scale=False, epsilon=1e-5))
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Get the coreml model
self._test_model(model, model_precision=model_precision)
def test_conv_batchnorm_no_gamma_no_beta_half_precision(self):
return self.test_conv_batchnorm_no_gamma_no_beta(
model_precision=_MLMODEL_HALF_PRECISION
)
def test_tiny_deconv_random(self):
# In Keras 2, deconvolution auto computes the output shape.
np.random.seed(1988)
input_dim = 13
input_shape = (input_dim, input_dim, 5)
num_kernels = 16
kernel_height = 3
kernel_width = 3
# Define a model
model = Sequential()
model.add(
Conv2DTranspose(
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
input_shape=input_shape,
padding="valid",
use_bias=False,
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_deconv_random_same_padding(self):
np.random.seed(1988)
input_dim = 14
input_shape = (input_dim, input_dim, 3)
num_kernels = 16
kernel_height = 3
kernel_width = 3
# Define a model
model = Sequential()
model.add(
Conv2DTranspose(
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
input_shape=input_shape,
padding="same",
strides=(2, 2),
use_bias=True,
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_depthwise_conv_same_pad(self):
np.random.seed(1988)
input_dim = 16
input_shape = (input_dim, input_dim, 3)
depth_multiplier = 1
kernel_height = 3
kernel_width = 3
# Define a model
model = Sequential()
model.add(
DepthwiseConv2D(
depth_multiplier=depth_multiplier,
kernel_size=(kernel_height, kernel_width),
input_shape=input_shape,
padding="same",
strides=(1, 1),
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_depthwise_conv_valid_pad(self):
np.random.seed(1988)
input_dim = 16
input_shape = (input_dim, input_dim, 3)
depth_multiplier = 1
kernel_height = 3
kernel_width = 3
# Define a model
model = Sequential()
model.add(
DepthwiseConv2D(
depth_multiplier=depth_multiplier,
kernel_size=(kernel_height, kernel_width),
input_shape=input_shape,
padding="valid",
strides=(1, 1),
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_depthwise_conv_same_pad_depth_multiplier(self):
np.random.seed(1988)
input_dim = 16
input_shape = (input_dim, input_dim, 3)
depth_multiplier = 4
kernel_height = 3
kernel_width = 3
# Define a model
model = Sequential()
model.add(
DepthwiseConv2D(
depth_multiplier=depth_multiplier,
kernel_size=(kernel_height, kernel_width),
input_shape=input_shape,
padding="same",
strides=(1, 1),
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_depthwise_conv_valid_pad_depth_multiplier(self):
np.random.seed(1988)
input_dim = 16
input_shape = (input_dim, input_dim, 3)
depth_multiplier = 2
kernel_height = 3
kernel_width = 3
# Define a model
model = Sequential()
model.add(
DepthwiseConv2D(
depth_multiplier=depth_multiplier,
kernel_size=(kernel_height, kernel_width),
input_shape=input_shape,
padding="valid",
strides=(1, 1),
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_separable_conv_valid(self):
np.random.seed(1988)
input_dim = 16
input_shape = (input_dim, input_dim, 3)
depth_multiplier = 1
kernel_height = 3
kernel_width = 3
num_kernels = 4
# Define a model
model = Sequential()
model.add(
SeparableConv2D(
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
padding="valid",
strides=(1, 1),
depth_multiplier=depth_multiplier,
input_shape=input_shape,
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_separable_conv_same_fancy(self):
np.random.seed(1988)
input_dim = 16
input_shape = (input_dim, input_dim, 3)
depth_multiplier = 1
kernel_height = 3
kernel_width = 3
num_kernels = 4
# Define a model
model = Sequential()
model.add(
SeparableConv2D(
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
padding="same",
strides=(2, 2),
activation="relu",
depth_multiplier=depth_multiplier,
input_shape=input_shape,
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_separable_conv_valid_depth_multiplier(self):
np.random.seed(1988)
input_dim = 16
input_shape = (input_dim, input_dim, 3)
depth_multiplier = 5
kernel_height = 3
kernel_width = 3
num_kernels = 40
# Define a model
model = Sequential()
model.add(
SeparableConv2D(
filters=num_kernels,
kernel_size=(kernel_height, kernel_width),
padding="valid",
strides=(1, 1),
depth_multiplier=depth_multiplier,
input_shape=input_shape,
)
)
# Set some random weights
model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
# Test the keras model
self._test_model(model)
def test_tiny_separable_conv_same_fancy_depth_multiplier(
    self, model_precision=_MLMODEL_FULL_PRECISION
):
    """SeparableConv2D combining 'same' padding, stride 2, ReLU and depth_multiplier=2.

    model_precision: precision used for the converted model; the
    half-precision variant below re-runs this test at half precision.
    """
    np.random.seed(1988)
    input_dim = 16
    input_shape = (input_dim, input_dim, 3)
    depth_multiplier = 2
    kernel_height = 3
    kernel_width = 3
    num_kernels = 40

    # Define a model
    model = Sequential()
    model.add(
        SeparableConv2D(
            filters=num_kernels,
            kernel_size=(kernel_height, kernel_width),
            padding="same",
            strides=(2, 2),
            activation="relu",
            depth_multiplier=depth_multiplier,
            input_shape=input_shape,
        )
    )

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Test the keras model
    self._test_model(model, model_precision=model_precision)
def test_tiny_separable_conv_same_fancy_depth_multiplier_half_precision(self):
    """Re-run the depth-multiplier SeparableConv2D test at half precision."""
    return self.test_tiny_separable_conv_same_fancy_depth_multiplier(
        model_precision=_MLMODEL_HALF_PRECISION
    )
def test_tiny_separable_conv_dilated(self, model_precision=_MLMODEL_FULL_PRECISION):
    """SeparableConv2D with dilation_rate=(2, 2) converts correctly."""
    np.random.seed(1988)
    input_dim = 10
    input_shape = (input_dim, input_dim, 1)
    num_kernels, kernel_height, kernel_width = 3, 5, 5

    # Define a model
    model = Sequential()
    model.add(
        SeparableConv2D(
            input_shape=input_shape,
            dilation_rate=(2, 2),
            filters=num_kernels,
            kernel_size=(kernel_height, kernel_width),
        )
    )

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Test the keras model
    self._test_model(model, model_precision=model_precision)
def test_tiny_separable_conv_dilated_half_precision(self):
    """Re-run the dilated SeparableConv2D test at half precision."""
    return self.test_tiny_separable_conv_dilated(
        model_precision=_MLMODEL_HALF_PRECISION
    )
def test_tiny_separable_conv_dilated_rect_random(
    self, model_precision=_MLMODEL_FULL_PRECISION
):
    """Dilated SeparableConv2D on a non-square (32, 20, 3) input converts correctly."""
    np.random.seed(1988)
    input_shape = (32, 20, 3)  # rectangular input
    num_kernels = 2
    kernel_height = 3
    kernel_width = 3

    # Define a model
    model = Sequential()
    model.add(
        SeparableConv2D(
            input_shape=input_shape,
            dilation_rate=(2, 2),
            filters=num_kernels,
            kernel_size=(kernel_height, kernel_width),
        )
    )

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Test the keras model
    self._test_model(model, model_precision=model_precision)
def test_tiny_separable_conv_dilated_rect_random_half_precision(self):
    """Re-run the dilated rectangular SeparableConv2D test at half precision."""
    return self.test_tiny_separable_conv_dilated_rect_random(
        model_precision=_MLMODEL_HALF_PRECISION
    )
def test_max_pooling_no_overlap(self):
    """MaxPooling2D with non-overlapping windows converts correctly."""
    # no_overlap: pool_size = strides (strides=None defaults to pool_size in Keras)
    model = Sequential()
    model.add(
        MaxPooling2D(
            input_shape=(16, 16, 3), pool_size=(2, 2), strides=None, padding="valid"
        )
    )
    self._test_model(model)
def test_max_pooling_overlap_multiple(self):
    """Overlapping MaxPooling2D where the input size is a multiple of pool_size."""
    # input shape is multiple of pool_size, strides != pool_size
    model = Sequential()
    model.add(
        MaxPooling2D(
            input_shape=(18, 18, 3),
            pool_size=(3, 3),
            strides=(2, 2),
            padding="valid",
        )
    )
    self._test_model(model)
def test_max_pooling_overlap_odd(self):
    """Overlapping MaxPooling2D where the input is not a multiple of pool_size."""
    model = Sequential()
    model.add(
        MaxPooling2D(
            input_shape=(16, 16, 3),
            pool_size=(3, 3),
            strides=(2, 2),
            padding="valid",
        )
    )
    self._test_model(model)
def test_max_pooling_overlap_same(self):
    """Overlapping MaxPooling2D with 'same' padding converts correctly."""
    model = Sequential()
    model.add(
        MaxPooling2D(
            input_shape=(16, 16, 3),
            pool_size=(3, 3),
            strides=(2, 2),
            padding="same",
        )
    )
    self._test_model(model)
def test_global_max_pooling(self):
    """GlobalMaxPooling2D converts correctly."""
    model = Sequential()
    model.add(GlobalMaxPooling2D(input_shape=(16, 16, 3)))
    self._test_model(model)
def test_average_pooling_no_overlap(self):
    """AveragePooling2D with non-overlapping windows converts correctly."""
    # no_overlap: pool_size = strides (strides=None defaults to pool_size in Keras)
    model = Sequential()
    model.add(
        AveragePooling2D(
            input_shape=(16, 16, 3), pool_size=(2, 2), strides=None, padding="valid"
        )
    )
    # Looser tolerance for average pooling
    self._test_model(model, delta=1e-2)
def test_average_pooling_inception_config_1(self):
    """AveragePooling2D in an Inception-style config (3x3 pool, stride 1, 'same')."""
    # Overlapping windows: pool_size=(3, 3) with strides=(1, 1)
    model = Sequential()
    model.add(
        AveragePooling2D(
            input_shape=(16, 16, 3),
            pool_size=(3, 3),
            strides=(1, 1),
            padding="same",
        )
    )
    # Looser tolerance for average pooling
    self._test_model(model, delta=1e-2)
def test_global_average_pooling(self):
    """GlobalAveragePooling2D converts correctly."""
    model = Sequential()
    model.add(GlobalAveragePooling2D(input_shape=(16, 16, 3)))
    self._test_model(model)
def test_max_pooling_1d(self):
    """MaxPooling1D converts correctly."""
    model = Sequential()
    model.add(MaxPooling1D(input_shape=(16, 3), pool_size=4))
    self._test_model(model)
def test_global_max_pooling_1d(self):
    """GlobalMaxPooling1D after a Conv1D converts correctly."""
    np.random.seed(1988)
    input_dim = 2
    input_length = 10
    filter_length = 3
    nb_filters = 4
    model = Sequential()
    model.add(
        Conv1D(
            nb_filters,
            kernel_size=filter_length,
            padding="same",
            input_shape=(input_length, input_dim),
        )
    )
    model.add(GlobalMaxPooling1D())
    self._test_model(model)
def test_average_pooling_1d(self):
    """AveragePooling1D after a Conv1D converts correctly."""
    np.random.seed(1988)
    input_dim = 2
    input_length = 10
    filter_length = 3
    nb_filters = 4
    model = Sequential()
    model.add(
        Conv1D(
            nb_filters,
            kernel_size=filter_length,
            padding="same",
            input_shape=(input_length, input_dim),
        )
    )
    model.add(AveragePooling1D(pool_size=2))
    self._test_model(model)
def test_global_average_pooling_1d(self):
    """GlobalAveragePooling1D after a Conv1D converts correctly."""
    np.random.seed(1988)
    input_dim = 2
    input_length = 10
    filter_length = 3
    nb_filters = 4
    model = Sequential()
    model.add(
        Conv1D(
            nb_filters,
            kernel_size=filter_length,
            padding="same",
            input_shape=(input_length, input_dim),
        )
    )
    model.add(GlobalAveragePooling1D())
    self._test_model(model)
def test_tiny_conv_upsample_random(self):
    """Conv2D followed by UpSampling2D converts correctly."""
    np.random.seed(1988)
    input_dim = 10
    input_shape = (input_dim, input_dim, 1)
    num_kernels = 3
    kernel_height = 5
    kernel_width = 5

    # Define a model
    model = Sequential()
    model.add(
        Conv2D(
            input_shape=input_shape,
            filters=num_kernels,
            kernel_size=(kernel_height, kernel_width),
        )
    )
    model.add(UpSampling2D(size=2))

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Test the keras model
    self._test_model(model)
def test_tiny_conv_upsample_1d_random(self):
    """Conv1D followed by UpSampling1D converts correctly."""
    np.random.seed(1988)
    input_dim = 2
    input_length = 10
    filter_length = 3
    nb_filters = 4
    model = Sequential()
    model.add(
        Conv1D(
            nb_filters,
            kernel_size=filter_length,
            padding="same",
            input_shape=(input_length, input_dim),
        )
    )
    model.add(UpSampling1D(size=2))

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Test the keras model
    self._test_model(model)
def test_tiny_conv_crop_1d_random(self, model_precision=_MLMODEL_FULL_PRECISION):
    """Conv1D followed by Cropping1D converts correctly."""
    np.random.seed(1988)
    input_dim = 2
    input_length = 10
    filter_length = 3
    nb_filters = 4
    model = Sequential()
    model.add(
        Conv1D(
            nb_filters,
            kernel_size=filter_length,
            padding="same",
            input_shape=(input_length, input_dim),
        )
    )
    model.add(Cropping1D(cropping=2))

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Test the keras model
    self._test_model(model, model_precision=model_precision)
def test_tiny_conv_crop_1d_random_half_precision(self):
    """Re-run the Conv1D + Cropping1D test at half precision."""
    return self.test_tiny_conv_crop_1d_random(
        model_precision=_MLMODEL_HALF_PRECISION
    )
def test_tiny_conv_pad_1d_random(self, model_precision=_MLMODEL_FULL_PRECISION):
    """Conv1D followed by ZeroPadding1D converts correctly."""
    np.random.seed(1988)
    input_dim = 2
    input_length = 10
    filter_length = 3
    nb_filters = 4
    model = Sequential()
    model.add(
        Conv1D(
            nb_filters,
            kernel_size=filter_length,
            padding="same",
            input_shape=(input_length, input_dim),
        )
    )
    model.add(ZeroPadding1D(padding=2))

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Test the keras model
    self._test_model(model, model_precision=model_precision)
def test_tiny_conv_pad_1d_random_half_precision(self):
    """Re-run the Conv1D + ZeroPadding1D test at half precision."""
    return self.test_tiny_conv_pad_1d_random(
        model_precision=_MLMODEL_HALF_PRECISION
    )
def test_tiny_conv_causal_1d(self):
    """Conv1D with 'causal' padding (and no bias) converts correctly."""
    np.random.seed(1988)
    model = Sequential()
    model.add(Conv1D(1, 3, input_shape=(10, 1), use_bias=False, padding="causal"))
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
    self._test_model(model)
def test_embedding(self, model_precision=_MLMODEL_FULL_PRECISION):
    """A bare Embedding layer converts correctly.

    model_precision: precision used for the converted model; the
    half-precision variant below re-runs this test at half precision.
    """
    # Seed before randomizing weights, consistent with the sibling tests,
    # so a failure is reproducible.
    np.random.seed(1988)
    model = Sequential()
    num_inputs = 10
    num_outputs = 3
    model.add(Embedding(num_inputs, num_outputs))
    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
    self._test_model(model, model_precision=model_precision)
def test_embedding_half_precision(self):
    """Re-run the Embedding test at half precision."""
    return self.test_embedding(model_precision=_MLMODEL_HALF_PRECISION)
def test_embedding_seq(self, model_precision=_MLMODEL_FULL_PRECISION):
    """An Embedding layer with a fixed input_length, fed as a 1-D sequence.

    model_precision: precision used for the converted model; the
    half-precision variant below re-runs this test at half precision.
    """
    # Seed before randomizing weights, consistent with the sibling tests,
    # so a failure is reproducible.
    np.random.seed(1988)
    model = Sequential()
    num_inputs = 10
    num_outputs = 3
    model.add(Embedding(num_inputs, num_outputs, input_length=7))
    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
    self._test_model(
        model, one_dim_seq_flags=[True], model_precision=model_precision
    )
def test_embedding_seq_half_precision(self):
    """Re-run the sequence Embedding test at half precision."""
    return self.test_embedding_seq(model_precision=_MLMODEL_HALF_PRECISION)
def test_tiny_no_sequence_simple_rnn_random(self):
    """SimpleRNN with sequence length 1 converts correctly."""
    np.random.seed(1988)
    input_dim = 10
    input_length = 1
    num_channels = 1

    # Define a model
    model = Sequential()
    model.add(SimpleRNN(num_channels, input_shape=(input_length, input_dim)))

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Test the keras model
    self._test_model(model)
def test_tiny_sequence_simple_rnn_random(self):
    """SimpleRNN over a length-4 sequence converts correctly."""
    np.random.seed(1988)
    input_dim = 2
    input_length = 4
    num_channels = 3

    # Define a model
    model = Sequential()
    model.add(SimpleRNN(num_channels, input_shape=(input_length, input_dim)))

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model)
def test_tiny_seq2seq_rnn_random(self):
    """SimpleRNN with return_sequences=True (seq-in, seq-out) converts correctly."""
    np.random.seed(1988)
    input_dim = 2
    input_length = 4
    num_channels = 3

    # Define a model
    model = Sequential()
    model.add(
        SimpleRNN(
            num_channels,
            input_shape=(input_length, input_dim),
            return_sequences=True,
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model)
def test_rnn_seq(self):
    """SimpleRNN over a sequence with return_sequences=False converts correctly."""
    np.random.seed(1988)
    input_dim = 11
    input_length = 5

    # Define a model
    model = Sequential()
    model.add(
        SimpleRNN(20, input_shape=(input_length, input_dim), return_sequences=False)
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model)
def test_rnn_seq_backwards(self):
    """SimpleRNN with go_backwards=True converts correctly."""
    np.random.seed(1988)
    input_dim = 11
    input_length = 5

    # Define a model
    model = Sequential()
    model.add(
        SimpleRNN(
            20,
            input_shape=(input_length, input_dim),
            return_sequences=False,
            go_backwards=True,
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model)
def test_medium_no_sequence_simple_rnn_random(self):
    """SimpleRNN with 10 units, sequence length 1, converts correctly."""
    np.random.seed(1988)
    input_dim = 10
    input_length = 1
    num_channels = 10

    # Define a model
    model = Sequential()
    model.add(SimpleRNN(num_channels, input_shape=(input_length, input_dim)))

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model)
def test_tiny_no_sequence_lstm_zeros(self):
    """1-unit LSTM (implementation=1), evaluated on all-zero input."""
    np.random.seed(1988)
    input_dim = 1
    input_length = 1
    num_channels = 1

    model = Sequential()
    model.add(
        LSTM(
            num_channels,
            input_shape=(input_length, input_dim),
            implementation=1,
            recurrent_activation="sigmoid",
        )
    )
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )
    # mode="zeros": the test harness feeds all-zero inputs
    self._test_model(model, mode="zeros")
def test_tiny_no_sequence_lstm_ones(self):
    """1-unit LSTM (implementation=1), evaluated on all-ones input."""
    np.random.seed(1988)
    input_dim = 1
    input_length = 1
    num_channels = 1

    model = Sequential()
    model.add(
        LSTM(
            num_channels,
            input_shape=(input_length, input_dim),
            implementation=1,
            recurrent_activation="sigmoid",
        )
    )
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )
    # mode="ones": the test harness feeds all-ones inputs
    self._test_model(model, mode="ones")
def test_small_no_sequence_lstm_zeros(self):
    """1-unit LSTM (implementation=2) over 10-d input, all-zero input."""
    np.random.seed(1988)
    input_dim = 10
    input_length = 1
    num_channels = 1

    model = Sequential()
    model.add(
        LSTM(
            num_channels,
            input_shape=(input_length, input_dim),
            implementation=2,
            recurrent_activation="sigmoid",
        )
    )
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )
    self._test_model(model, mode="zeros")
def test_small_no_sequence_lstm_ones(self):
    """1-unit LSTM (implementation=2) over 10-d input, all-ones input."""
    np.random.seed(1988)
    input_dim = 10
    input_length = 1
    num_channels = 1

    model = Sequential()
    model.add(
        LSTM(
            num_channels,
            input_shape=(input_length, input_dim),
            implementation=2,
            recurrent_activation="sigmoid",
        )
    )
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )
    self._test_model(model, mode="ones")
def test_lstm_seq(self):
    """LSTM over a length-5 sequence with return_sequences=False."""
    np.random.seed(1988)
    input_dim = 11
    input_length = 5

    model = Sequential()
    model.add(
        LSTM(20, input_shape=(input_length, input_dim), return_sequences=False)
    )
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )
    self._test_model(model)
def test_lstm_seq_backwards(self):
    """LSTM with go_backwards=True converts correctly."""
    np.random.seed(1988)
    input_dim = 11
    input_length = 5

    model = Sequential()
    model.add(
        LSTM(
            20,
            input_shape=(input_length, input_dim),
            return_sequences=False,
            go_backwards=True,
        )
    )
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )
    self._test_model(model)
def test_medium_no_sequence_lstm_random(self):
    """10-unit LSTM with sequence length 1 converts correctly."""
    np.random.seed(1988)
    input_dim = 10
    input_length = 1
    num_channels = 10

    # Define a model
    model = Sequential()
    model.add(
        LSTM(
            num_channels,
            input_shape=(input_length, input_dim),
            recurrent_activation="sigmoid",
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model)
def test_tiny_no_sequence_lstm_zeros_gpu(self):
    """1-unit LSTM with implementation=2 (the GPU-oriented kernel), all-zero input."""
    np.random.seed(1988)
    input_dim = 1
    input_length = 1
    num_channels = 1

    # Define a model
    model = Sequential()
    model.add(
        LSTM(
            num_channels,
            input_shape=(input_length, input_dim),
            implementation=2,
            recurrent_activation="sigmoid",
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model, mode="zeros")
def test_small_no_sequence_lstm_random(self):
    """1-unit LSTM (implementation=2) over 10-d input, random input."""
    np.random.seed(1988)
    input_dim = 10
    input_length = 1
    num_channels = 1

    # Define a model
    model = Sequential()
    model.add(
        LSTM(
            num_channels,
            input_shape=(input_length, input_dim),
            implementation=2,
            recurrent_activation="sigmoid",
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model)
def test_tiny_no_sequence_gru_random(self, model_precision=_MLMODEL_FULL_PRECISION):
    """1-unit GRU with sequence length 1 converts correctly.

    model_precision: precision used for the converted model; the
    half-precision variant below re-runs this test at half precision.
    """
    np.random.seed(1988)
    input_dim = 1
    input_length = 1
    num_channels = 1
    # NOTE(review): removed an unused local (`num_samples = 1`) that was never read.

    # Define a model
    model = Sequential()
    model.add(
        GRU(
            num_channels,
            input_shape=(input_length, input_dim),
            recurrent_activation="sigmoid",
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model, model_precision=model_precision)
def test_tiny_no_sequence_gru_random_half_precision(self):
    """Re-run the tiny GRU test at half precision."""
    return self.test_tiny_no_sequence_gru_random(
        model_precision=_MLMODEL_HALF_PRECISION
    )
def test_small_no_sequence_gru_random(self):
    """1-unit GRU over 10-d input with sequence length 1."""
    np.random.seed(1988)
    input_dim = 10
    input_length = 1
    num_channels = 1

    # Define a model
    model = Sequential()
    model.add(
        GRU(
            num_channels,
            input_shape=(input_length, input_dim),
            recurrent_activation="sigmoid",
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model)
def test_medium_no_sequence_gru_random(
    self, model_precision=_MLMODEL_FULL_PRECISION
):
    """10-unit GRU with sequence length 1 converts correctly."""
    np.random.seed(1988)
    input_dim = 10
    input_length = 1
    num_channels = 10

    # Define a model
    model = Sequential()
    model.add(
        GRU(
            num_channels,
            input_shape=(input_length, input_dim),
            recurrent_activation="sigmoid",
        )
    )

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Test the keras model
    self._test_model(model, model_precision=model_precision)
def test_medium_no_sequence_gru_random_half_precision(self):
    """Re-run the medium GRU test at half precision."""
    return self.test_medium_no_sequence_gru_random(
        model_precision=_MLMODEL_HALF_PRECISION
    )
def test_gru_seq(self):
    """GRU over a length-5 sequence with return_sequences=False."""
    np.random.seed(1988)
    input_dim = 11
    input_length = 5

    # Define a model
    model = Sequential()
    model.add(
        GRU(20, input_shape=(input_length, input_dim), return_sequences=False)
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model)
def test_gru_seq_backwards(self, model_precision=_MLMODEL_FULL_PRECISION):
    """GRU with go_backwards=True converts correctly."""
    np.random.seed(1988)
    input_dim = 11
    input_length = 5

    # Define a model
    model = Sequential()
    model.add(
        GRU(
            20,
            input_shape=(input_length, input_dim),
            return_sequences=False,
            go_backwards=True,
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model, model_precision=model_precision)
def test_gru_seq_backwards_half_precision(self):
    """Re-run the backwards GRU test at half precision."""
    return self.test_gru_seq_backwards(model_precision=_MLMODEL_HALF_PRECISION)
def test_tiny_no_sequence_bidir_random(
    self, model_precision=_MLMODEL_FULL_PRECISION
):
    """Bidirectional LSTM (implementation=1) with sequence length 1.

    model_precision: precision used for the converted model; the
    half-precision variant below re-runs this test at half precision.
    """
    np.random.seed(1988)
    input_dim = 1
    input_length = 1
    num_channels = 1
    # NOTE(review): removed an unused local (`num_samples = 1`) that was never read.

    # Define a model
    model = Sequential()
    model.add(
        Bidirectional(
            LSTM(num_channels, implementation=1, recurrent_activation="sigmoid"),
            input_shape=(input_length, input_dim),
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model, model_precision=model_precision)
def test_tiny_no_sequence_bidir_random_half_precision(self):
    """Re-run the tiny bidirectional LSTM test at half precision."""
    return self.test_tiny_no_sequence_bidir_random(
        model_precision=_MLMODEL_HALF_PRECISION
    )
def test_tiny_no_sequence_bidir_random_gpu(
    self, model_precision=_MLMODEL_FULL_PRECISION
):
    """Bidirectional LSTM with implementation=2, sequence length 1.

    model_precision: precision used for the converted model; the
    half-precision variant below re-runs this test at half precision.
    """
    np.random.seed(1988)
    input_dim = 1
    input_length = 1
    num_channels = 1
    # NOTE(review): removed an unused local (`num_samples = 1`) that was never read.

    # Define a model
    model = Sequential()
    model.add(
        Bidirectional(
            LSTM(num_channels, implementation=2, recurrent_activation="sigmoid"),
            input_shape=(input_length, input_dim),
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model, model_precision=model_precision)
def test_tiny_no_sequence_bidir_random_gpu_half_precision(self):
    """Re-run the implementation=2 bidirectional LSTM test at half precision."""
    return self.test_tiny_no_sequence_bidir_random_gpu(
        model_precision=_MLMODEL_HALF_PRECISION
    )
def test_small_no_sequence_bidir_random(self):
    """Bidirectional 1-unit LSTM over 10-d input with sequence length 1."""
    np.random.seed(1988)
    input_dim = 10
    input_length = 1
    num_channels = 1

    # Define a model
    model = Sequential()
    model.add(
        Bidirectional(
            LSTM(num_channels, implementation=2, recurrent_activation="sigmoid"),
            input_shape=(input_length, input_dim),
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model)
def test_medium_no_sequence_bidir_random(self):
    """Bidirectional 10-unit LSTM with sequence length 1."""
    np.random.seed(1988)
    input_dim = 10
    input_length = 1
    num_channels = 10

    # Define a model
    model = Sequential()
    model.add(
        Bidirectional(
            LSTM(num_channels, implementation=2, recurrent_activation="sigmoid"),
            input_shape=(input_length, input_dim),
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model)
def test_medium_bidir_random_return_seq_false(self):
    """Bidirectional LSTM over a sequence, return_sequences=False."""
    np.random.seed(1988)
    input_dim = 7
    input_length = 5
    num_channels = 10

    # Define a model
    model = Sequential()
    model.add(
        Bidirectional(
            LSTM(
                num_channels,
                return_sequences=False,
                implementation=2,
                recurrent_activation="sigmoid",
            ),
            input_shape=(input_length, input_dim),
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model)
def test_medium_bidir_random_return_seq_true(self):
    """Bidirectional LSTM over a sequence, return_sequences=True."""
    np.random.seed(1988)
    input_dim = 7
    input_length = 5
    num_channels = 10

    # Define a model
    model = Sequential()
    model.add(
        Bidirectional(
            LSTM(
                num_channels,
                return_sequences=True,
                implementation=2,
                recurrent_activation="sigmoid",
            ),
            input_shape=(input_length, input_dim),
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
    )

    # Test the keras model
    self._test_model(model)
def test_bilstm_merge_modes(self):
    """Bidirectional LSTM converts correctly for every merge_mode (issue 157)."""

    # issue 157
    def get_model(input_dim, fc_size, rnn_size, output_dim, merge_mode):
        """Build TimeDistributed(Dense) -> BiLSTM(merge_mode) -> TimeDistributed(Dense softmax)."""
        input_data = Input(name="the_input", shape=(None, input_dim))
        x = TimeDistributed(Dense(fc_size, name="fc1", activation="relu",))(
            input_data
        )
        x = Bidirectional(
            LSTM(
                rnn_size,
                return_sequences=True,
                activation="relu",
                kernel_initializer="he_normal",
            ),
            merge_mode=merge_mode,
        )(x)
        y_pred = TimeDistributed(
            Dense(output_dim, name="y_pred", activation="softmax")
        )(x)
        model = Model([input_data], [y_pred])
        # Small random weights in [-0.1, 0.1)
        model.set_weights(
            [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
        )
        return model

    input_dim = 26
    fc_size = 512
    rnn_size = 512
    output_dim = 29
    # Exercise every supported way of merging the forward/backward outputs
    for merge_mode in ["concat", "sum", "mul", "ave"]:
        model = get_model(input_dim, fc_size, rnn_size, output_dim, merge_mode)
        self._test_model(model)
def test_tiny_conv_elu_random(self):
    """Conv2D followed by an ELU activation layer converts correctly."""
    np.random.seed(1988)

    # Define a model
    from keras.layers.advanced_activations import ELU

    model = Sequential()
    model.add(Conv2D(input_shape=(10, 10, 3), filters=3, kernel_size=(5, 5)))
    model.add(ELU(alpha=0.8))
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_tiny_conv_prelu_random(self, model_precision=_MLMODEL_FULL_PRECISION):
    """Conv2D followed by PReLU (shared over spatial axes) converts correctly."""
    np.random.seed(1988)

    # Define a model
    from keras.layers.advanced_activations import PReLU

    model = Sequential()
    model.add(
        Conv2D(
            input_shape=(10, 10, 3), filters=3, kernel_size=(5, 5), padding="same"
        )
    )
    # shared_axes=[1, 2]: one alpha per channel, shared across H and W
    model.add(PReLU(shared_axes=[1, 2]))
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model, model_precision=model_precision)
def test_tiny_conv_prelu_random_half_precision(self):
    """Re-run the Conv2D + PReLU test at half precision."""
    return self.test_tiny_conv_prelu_random(model_precision=_MLMODEL_HALF_PRECISION)
def test_tiny_conv_leaky_relu_random(self):
    """Conv2D followed by LeakyReLU converts correctly."""
    np.random.seed(1988)

    # Define a model
    from keras.layers.advanced_activations import LeakyReLU

    model = Sequential()
    model.add(
        Conv2D(
            input_shape=(10, 10, 3), filters=3, kernel_size=(5, 5), padding="same"
        )
    )
    model.add(LeakyReLU(alpha=0.3))
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_tiny_conv_thresholded_relu_random(self):
    """Conv2D followed by ThresholdedReLU converts correctly."""
    np.random.seed(1988)

    # Define a model
    from keras.layers.advanced_activations import ThresholdedReLU

    model = Sequential()
    model.add(
        Conv2D(
            input_shape=(10, 10, 3), filters=3, kernel_size=(5, 5), padding="same"
        )
    )
    model.add(ThresholdedReLU(theta=0.8))
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_tiny_concat_random(self):
    """A functional model with two Dense branches joined by concatenate()."""
    np.random.seed(1988)
    input_dim = 10
    num_channels = 6

    # Define a model: two parallel branches off x1, concatenated
    input_tensor = Input(shape=(input_dim,))
    x1 = Dense(num_channels)(input_tensor)
    x2 = Dense(num_channels)(x1)
    x3 = Dense(num_channels)(x1)
    x4 = concatenate([x2, x3])
    x5 = Dense(num_channels)(x4)

    model = Model(inputs=[input_tensor], outputs=[x5])

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_tiny_concat_seq_random(self):
    """Two Embedding branches concatenated along the sequence axis (axis=1)."""
    np.random.seed(1988)
    max_features = 10
    embedding_dims = 4
    seq_len = 5
    num_channels = 6

    # Define a model
    input_tensor = Input(shape=(seq_len,))
    x1 = Embedding(max_features, embedding_dims)(input_tensor)
    x2 = Embedding(max_features, embedding_dims)(input_tensor)
    x3 = concatenate([x1, x2], axis=1)

    model = Model(inputs=[input_tensor], outputs=[x3])

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model, one_dim_seq_flags=[True])
def test_lstm_concat_dense_random(self):
    """Embedding + extra feature concatenated, through stacked LSTMs and Denses.

    Two inputs (token ids and a per-step scalar) are concatenated on the
    feature axis and fed through LSTM -> LSTM -> Dense -> Dense(softmax).
    """
    np.random.seed(1988)
    vocab_size = 1250
    seq_length = 5
    units = 32

    # Define a model (renamed the first input from `input`, which shadowed
    # the builtin of the same name)
    tokens_in = Input(shape=(seq_length,))
    pos = Input(shape=(seq_length, 1))
    embedding = Embedding(vocab_size, 50, input_length=seq_length)(tokens_in)
    concat = Concatenate(axis=2)([embedding, pos])
    model = LSTM(units, return_sequences=True, stateful=False)(concat)
    model = LSTM(units, return_sequences=False)(model)
    model = Dense(100, activation="relu")(model)
    model = Dense(vocab_size, activation="softmax")(model)
    model = Model(inputs=[tokens_in, pos], outputs=model)

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model, one_dim_seq_flags=[True, True])
def test_tiny_add_random(self):
    """A functional model with two Dense branches joined by add()."""
    np.random.seed(1988)
    input_dim = 10
    num_channels = 6

    # Define a model: two parallel branches off x1, summed elementwise
    input_tensor = Input(shape=(input_dim,))
    x1 = Dense(num_channels)(input_tensor)
    x2 = Dense(num_channels)(x1)
    x3 = Dense(num_channels)(x1)
    x4 = add([x2, x3])
    x5 = Dense(num_channels)(x4)

    model = Model(inputs=[input_tensor], outputs=[x5])

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_tiny_mul_random(self):
    """A functional model with two Dense branches joined by multiply()."""
    np.random.seed(1988)
    input_dim = 10
    num_channels = 6

    # Define a model: two parallel branches off x1, multiplied elementwise
    input_tensor = Input(shape=(input_dim,))
    x1 = Dense(num_channels)(input_tensor)
    x2 = Dense(num_channels)(x1)
    x3 = Dense(num_channels)(x1)
    x4 = multiply([x2, x3])
    x5 = Dense(num_channels)(x4)

    model = Model(inputs=[input_tensor], outputs=[x5])

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_tiny_cos_random(self):
    """Two Dense branches joined by a normalized dot product (cosine similarity)."""
    np.random.seed(1988)
    input_dim = 10
    num_channels = 6

    # Define a model: dot(..., normalize=True) computes cosine proximity
    input_tensor = Input(shape=(input_dim,))
    x1 = Dense(num_channels)(input_tensor)
    x2 = Dense(num_channels)(x1)
    x3 = Dense(num_channels)(x1)
    x4 = dot([x2, x3], axes=-1, normalize=True)
    x5 = Dense(num_channels)(x4)

    model = Model(inputs=[input_tensor], outputs=[x5])

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_zeropad_simple(self):
    """ZeroPadding2D with symmetric (1, 1) padding converts correctly."""
    input_shape = (48, 48, 3)
    model = Sequential()
    model.add(ZeroPadding2D((1, 1), input_shape=input_shape))

    # Set some random weights (padding has none; the list is empty)
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_zeropad_fancy(self):
    """ZeroPadding2D with asymmetric per-side padding converts correctly."""
    input_shape = (48, 48, 3)
    model = Sequential()
    model.add(ZeroPadding2D(((2, 5), (3, 4)), input_shape=input_shape))

    # Set some random weights (padding has none; the list is empty)
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_crop_simple(self):
    """Cropping2D with asymmetric per-side cropping converts correctly."""
    input_shape = (48, 48, 3)
    model = Sequential()
    model.add(Cropping2D(cropping=((2, 5), (2, 5)), input_shape=input_shape))

    # Set some random weights (cropping has none; the list is empty)
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_tiny_permute(self):
    """Permute converts correctly for every permutation of a 3-D blob's axes."""
    # When input blob is 3D array (D1, D2, D3), Keras assumes the axes' meaning is
    # (D1=H,D2=W,D3=C), while CoreML assumes (D1=C,D2=H,D3=W)
    import itertools

    for permute_order in list(itertools.permutations([1, 2, 3])):
        model = Sequential()
        model.add(Permute(permute_order, input_shape=(4, 3, 2)))
        # transpose_keras_result reconciles the axis-order conventions above
        self._test_model(model, transpose_keras_result=True)
def test_reshape_3d(self):
    """Reshape from (5, 4, 3) to (10, 1, 6) converts correctly."""
    model = Sequential()
    model.add(Reshape((10, 1, 6), input_shape=(5, 4, 3)))
    self._test_model(model, mode="linear")
def test_tiny_conv_dense_random(self):
    """Conv2D -> Dropout -> Flatten -> Dense pipeline converts correctly."""
    np.random.seed(1988)
    input_dim = 8
    input_shape = (input_dim, input_dim, 3)
    num_kernels = 2
    kernel_height = 5
    kernel_width = 5
    hidden_dim = 4
    # NOTE(review): removed an unused local (`num_samples = 1`) that was never read.

    # Define a model
    model = Sequential()
    model.add(
        Conv2D(
            input_shape=input_shape,
            filters=num_kernels,
            kernel_size=(kernel_height, kernel_width),
        )
    )
    model.add(Dropout(0.5))
    model.add(Flatten())
    model.add(Dense(hidden_dim))

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_tiny_conv_dropout_random(self):
    """Conv2D -> SpatialDropout2D -> Flatten -> Dense pipeline converts correctly."""
    np.random.seed(1988)
    input_dim = 8
    input_shape = (input_dim, input_dim, 3)
    num_kernels = 2
    kernel_height = 5
    kernel_width = 5
    hidden_dim = 4
    # NOTE(review): removed an unused local (`num_samples = 1`) that was never read.

    # Define a model
    model = Sequential()
    model.add(
        Conv2D(
            input_shape=input_shape,
            filters=num_kernels,
            kernel_size=(kernel_height, kernel_width),
        )
    )
    model.add(SpatialDropout2D(0.5))
    model.add(Flatten())
    model.add(Dense(hidden_dim))

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_tiny_dense_tanh_fused_random(self):
    """Dense with a fused tanh activation converts correctly."""
    np.random.seed(1988)
    input_dim = 3
    hidden_dim = 4
    # NOTE(review): removed an unused local (`num_samples = 1`) that was never read.

    # Define a model
    model = Sequential()
    model.add(Dense(hidden_dim, input_shape=(input_dim,), activation="tanh"))

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_tiny_conv_relu_fused_random(self):
    """Conv2D with a fused ReLU activation converts correctly."""
    np.random.seed(1988)
    input_dim = 8
    input_shape = (input_dim, input_dim, 3)
    num_kernels = 2
    kernel_height = 5
    kernel_width = 5
    # NOTE(review): removed unused locals (`num_samples = 1`, `hidden_dim = 4`)
    # that were never read.

    # Define a model
    model = Sequential()
    model.add(
        Conv2D(
            input_shape=input_shape,
            activation="relu",
            filters=num_kernels,
            kernel_size=(kernel_height, kernel_width),
        )
    )

    # Set some random weights
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])

    # Get the coreml model
    self._test_model(model)
def test_tiny_time_distrbuted(self):
    """TimeDistributed(Dense) as the first layer of a model converts correctly.

    NOTE(review): "distrbuted" in the method name is a typo; kept as-is
    because the name is the externally visible test ID.
    """
    # as the first layer in a model
    model = Sequential()
    model.add(TimeDistributed(Dense(8), input_shape=(10, 16)))
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
    self._test_model(model)
def test_tiny_sequence_lstm(self, model_precision=_MLMODEL_FULL_PRECISION):
    """1-unit LSTM (implementation=1) over a length-2 sequence, tight tolerance."""
    np.random.seed(1988)
    input_dim = 1
    input_length = 2
    num_channels = 1

    # Define a model
    model = Sequential()
    model.add(
        LSTM(
            num_channels,
            input_shape=(input_length, input_dim),
            implementation=1,
            recurrent_activation="sigmoid",
        )
    )

    # Set some small random weights in [-0.1, 0.1)
    model.set_weights(
        [(np.random.rand(*w.shape) - 0.5) * 0.2 for w in model.get_weights()]
    )

    # Test the keras model with a tighter-than-default tolerance
    self._test_model(model, delta=1e-4, model_precision=model_precision)
def test_tiny_sequence_lstm_half_precision(self):
    """Re-run the tiny sequence LSTM test at half precision."""
    return self.test_tiny_sequence_lstm(model_precision=_MLMODEL_HALF_PRECISION)
def test_tiny_spatial_bn(self):
    """ZeroPadding2D followed by BatchNormalization(axis=2) converts correctly."""
    np.random.seed(1988)
    x_in = Input(shape=(7, 7, 2))
    x = ZeroPadding2D(padding=(1, 1))(x_in)
    x = BatchNormalization(axis=2)(x)
    model = Model(x_in, x)
    self._test_model(model, delta=1e-2)
def test_embedding_fixed_length(self):
    """Embedding with fixed input_length -> Flatten -> Dense stack converts correctly."""
    sequence_length = 5
    vocab_size = 10
    embed_channels = 4

    # Dense sized to consume the flattened embedding exactly
    dense_units = sequence_length * embed_channels
    model = Sequential()
    model.add(Embedding(vocab_size, embed_channels, input_length=sequence_length))
    model.add(Flatten())
    model.add(Dense(dense_units))
    model.add(Dense(20))
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
    self._test_model(model, one_dim_seq_flags=[True])
def test_conv1d_flatten(self, delta=1e-2):
    """AveragePooling1D -> Conv1D -> MaxPooling1D -> Flatten -> Dense pipeline.

    delta: numeric comparison tolerance passed through to _test_model.
    """
    model = Sequential()
    model.add(AveragePooling1D(2, input_shape=(64, 9)))
    model.add(Conv1D(16, 1, padding="same", activation="relu", use_bias=False))
    model.add(MaxPooling1D(2))
    model.add(Flatten())
    model.add(Dense(units=7, activation="softmax", use_bias=False))
    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
    self._test_model(model, delta=delta)
def test_dense_fused_act_in_td(self):
    """Dense with a fused softmax inside TimeDistributed converts correctly."""
    np.random.seed(1988)
    x_in = Input(shape=(10, 2))
    x = TimeDistributed(Dense(6, activation="softmax"))(x_in)
    model = Model(inputs=[x_in], outputs=[x])
    self._test_model(model, delta=1e-4)
def test_conv_batch_1d(self):
    """Embedding -> Conv1D -> BatchNormalization -> ReLU -> MaxPooling1D pipeline."""
    np.random.seed(1988)
    vocabulary_size = 4
    embedding_dimension = 6
    input_length = 10

    model = Sequential()
    model.add(
        Embedding(
            vocabulary_size,
            embedding_dimension,
            input_length=input_length,
            trainable=True,
        )
    )

    model.add(Conv1D(5, 2))
    model.add(BatchNormalization())
    model.add(Activation("relu"))
    model.add(MaxPooling1D(2))

    model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
    self._test_model(model, one_dim_seq_flags=[True])
    def test_lstm_td(self):
        """SimpleRNN returning sequences, followed by a TimeDistributed Dense.

        NOTE(review): despite the name, this builds a SimpleRNN, not an LSTM.
        """
        np.random.seed(1988)
        input_dim = 2
        input_length = 4
        num_channels = 3
        # Define a model
        model = Sequential()
        model.add(
            SimpleRNN(
                num_channels,
                return_sequences=True,
                input_shape=(input_length, input_dim),
            )
        )
        model.add(TimeDistributed(Dense(5)))
        # Set some random weights (small, zero-centered for RNN stability)
        model.set_weights(
            [np.random.rand(*w.shape) * 0.2 - 0.1 for w in model.get_weights()]
        )
        # Test the keras model
        self._test_model(model)
    # Making sure that giant channel sizes get handled correctly
    def test_large_channel_gpu(self):
        """Conv2D with >2048 output channels (GPU channel-limit regression)."""
        input_shape = (20, 20, 3)
        num_channels = 2049  # one past a 2048 hardware/runtime boundary
        kernel_size = 3
        model = Sequential()
        model.add(
            Conv2D(
                input_shape=input_shape,
                filters=num_channels,
                kernel_size=(kernel_size, kernel_size),
            )
        )
        model.set_weights(
            [(np.random.rand(*w.shape) - 0.5) * 0.2 for w in model.get_weights()]
        )
        self._test_model(model, delta=1e-2)
    # Expected to fail: batch (sequence) sizes past 2048 are not supported.
    @pytest.mark.xfail(raises=Exception)
    def test_large_batch_gpu(self):
        """TimeDistributed Dense over a >2048-step sequence (known failure)."""
        batch_size = 2049
        num_channels = 4
        kernel_size = 3
        model = Sequential()
        model.add(
            TimeDistributed(Dense(num_channels), input_shape=(batch_size, kernel_size))
        )
        model.set_weights(
            [(np.random.rand(*w.shape) - 0.5) * 0.2 for w in model.get_weights()]
        )
        self._test_model(model, delta=1e-2)
@unittest.skipIf(not _HAS_KERAS2_TF, "Missing keras. Skipping tests.")
@pytest.mark.keras2
class KerasTopologyCorrectnessTest(KerasNumericCorrectnessTest):
    """Correctness tests for non-trivial Keras graph topologies.

    Covers dangling inputs, shared layers/submodels, multiple and
    intermediate outputs, and small versions of well-known architectures.
    """

    def test_dangling_merge_left(self):
        # input1 feeds the concat directly (left operand) without any layer.
        x1 = Input(shape=(4,), name="input1")
        x2 = Input(shape=(5,), name="input2")
        y1 = Dense(6, name="dense")(x2)
        z = concatenate([x1, y1])
        model = Model(inputs=[x1, x2], outputs=[z])
        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
        self._test_model(model)

    def test_dangling_merge_right(self):
        # Same as above but the raw input is the right operand of the concat.
        x1 = Input(shape=(4,), name="input1")
        x2 = Input(shape=(5,), name="input2")
        y1 = Dense(6, name="dense")(x2)
        z = concatenate([y1, x1])
        model = Model(inputs=[x1, x2], outputs=[z])
        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
        self._test_model(model)

    def test_shared_vision(self):
        """A submodel applied twice (shared weights) feeding a merge."""
        digit_input = Input(shape=(27, 27, 1))
        x = Conv2D(64, (3, 3))(digit_input)
        x = Conv2D(64, (3, 3))(x)
        out = Flatten()(x)
        vision_model = Model(inputs=[digit_input], outputs=[out])
        # then define the tell-digits-apart model
        digit_a = Input(shape=(27, 27, 1))
        digit_b = Input(shape=(27, 27, 1))
        # the vision model will be shared, weights and all
        out_a = vision_model(digit_a)
        out_b = vision_model(digit_b)
        concatenated = concatenate([out_a, out_b])
        out = Dense(1, activation="sigmoid")(concatenated)
        model = Model(inputs=[digit_a, digit_b], outputs=out)
        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
        self._test_model(model)

    def test_tiny_weight_sharing(self):
        #     - Dense1 -----------
        # x - |                   |- Merge
        #     - Dense1 - Dense2 --
        x = Input(shape=(3,))
        dense = Dense(4)
        y1 = dense(x)  # same Dense layer used on both branches
        y2 = dense(x)
        y3 = Dense(4)(y2)
        z = concatenate([y1, y3])
        model = Model(inputs=[x], outputs=[z])
        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
        self._test_model(model, mode="random", delta=1e-2)

    def test_tiny_multiple_outputs(self):
        # Two independent Dense heads off the same input.
        x = Input(shape=(3,))
        y1 = Dense(4)(x)
        y2 = Dense(5)(x)
        model = Model([x], [y1, y2])
        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
        self._test_model(model, mode="random", delta=1e-2)

    def test_intermediate_outputs_dense(self):
        # Exposes an intermediate Dense activation as a model output.
        x = Input(shape=(3,))
        y = Dense(4, name="intermediate_dense_y")(x)
        z = Dense(5, name="intermediate_dense_z")(y)
        model = Model([x], [y, z])
        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
        self._test_model(model, mode="random", delta=1e-2)

    def test_intermediate_outputs_conv2d(self):
        # Intermediate Conv2D feature map as an extra output.
        x = Input(shape=(8, 8, 3))
        y = Conv2D(4, (3, 3), name="intermdiate_conv2d_1")(x)
        z = Conv2D(5, (3, 3), name="intermdiate_conv2d_2")(y)
        model = Model([x], [y, z])
        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
        self._test_model(model, mode="random", delta=1e-2)

    def test_intermediate_outputs_conv2d_fused_act(self):
        # As above but with fused ReLU activations on the conv layers.
        x = Input(shape=(8, 8, 3))
        y = Conv2D(4, (3, 3), name="intermdiate_conv2d_1_fused", activation="relu")(x)
        z = Conv2D(5, (3, 3), name="intermdiate_conv2d_2_fused", activation="relu")(y)
        model = Model([x], [y, z])
        # Zero-centered weights so ReLU clips roughly half the values.
        model.set_weights([np.random.rand(*w.shape) - 0.5 for w in model.get_weights()])
        self._test_model(model, mode="random", delta=1e-2)

    def test_intermediate_outputs_conv1d(self):
        x = Input(shape=(10, 3))
        y = Conv1D(4, 3, name="intermdiate_conv1d_1")(x)
        z = Conv1D(5, 3, name="intermdiate_conv1d_2")(y)
        model = Model([x], [y, z])
        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
        self._test_model(model, mode="random", delta=1e-2)

    def test_intermediate_outputs_conv1d_fused_act(self):
        x = Input(shape=(10, 3))
        y = Conv1D(4, 3, name="intermdiate_conv1d_1_fused", activation="relu")(x)
        z = Conv1D(5, 3, name="intermdiate_conv1d_2_fused", activation="relu")(y)
        model = Model([x], [y, z])
        model.set_weights([np.random.rand(*w.shape) - 0.5 for w in model.get_weights()])
        self._test_model(model, mode="random", delta=1e-2)

    def test_intermediate_rcnn_1d(self):
        """Conv block feeding a GRU, with the conv output also exposed."""
        x_in = Input(shape=(10, 2))
        # Conv block 1
        x = Conv1D(3, 3, padding="same", name="interm_rcnn_conv1")(x_in)
        x = BatchNormalization(axis=-1, name="interm_rcnn_bn1")(x)
        x = Activation("elu")(x)
        x = MaxPooling1D(pool_size=2, name="interm_rcnn_pool1")(x)
        out1 = x  # out1.shape = (5,3)
        x = GRU(6, name="gru1")(x)
        out2 = x
        model = Model(x_in, [out1, out2])
        # model = Model(x_in, [out2])
        self._test_model(model, mode="random_zero_mean", delta=1e-2)

    def test_tiny_mobilenet_arch(self, model_precision=_MLMODEL_FULL_PRECISION):
        """Miniature MobileNet: conv / depthwise-conv / pointwise-conv stacks."""

        def ReLU6(x, name):
            # Keras >= 2.2.1 ships a ReLU layer with a max_value argument;
            # older versions use the relu6 activation function instead.
            if keras.__version__ >= _StrictVersion("2.2.1"):
                return ReLU(6.0, name=name)(x)
            else:
                return Activation(relu6, name=name)(x)

        img_input = Input(shape=(32, 32, 3))
        x = Conv2D(
            4, (3, 3), padding="same", use_bias=False, strides=(2, 2), name="conv1"
        )(img_input)
        x = BatchNormalization(axis=-1, name="conv1_bn")(x)
        x = ReLU6(x, name="conv1_relu")
        x = DepthwiseConv2D(
            (3, 3),
            padding="same",
            depth_multiplier=1,
            strides=(1, 1),
            use_bias=False,
            name="conv_dw_1",
        )(x)
        x = BatchNormalization(axis=-1, name="conv_dw_1_bn")(x)
        x = ReLU6(x, name="conv_dw_1_relu")
        x = Conv2D(
            8, (1, 1), padding="same", use_bias=False, strides=(1, 1), name="conv_pw_1"
        )(x)
        x = BatchNormalization(axis=-1, name="conv_pw_1_bn")(x)
        x = ReLU6(x, name="conv_pw_1_relu")
        x = DepthwiseConv2D(
            (3, 3),
            padding="same",
            depth_multiplier=1,
            strides=(2, 2),
            use_bias=False,
            name="conv_dw_2",
        )(x)
        x = BatchNormalization(axis=-1, name="conv_dw_2_bn")(x)
        x = ReLU6(x, name="conv_dw_2_relu")
        x = Conv2D(
            8, (1, 1), padding="same", use_bias=False, strides=(2, 2), name="conv_pw_2"
        )(x)
        x = BatchNormalization(axis=-1, name="conv_pw_2_bn")(x)
        x = ReLU6(x, name="conv_pw_2_relu")
        model = Model(inputs=[img_input], outputs=[x])
        self._test_model(model, delta=1e-2, model_precision=model_precision)

    def test_tiny_mobilenet_arch_half_precision(self):
        self.test_tiny_mobilenet_arch(model_precision=_MLMODEL_HALF_PRECISION)

    def test_tiny_xception(self, model_precision=_MLMODEL_FULL_PRECISION):
        """Miniature Xception entry flow: separable convs + residual add."""
        img_input = Input(shape=(32, 32, 3))
        x = Conv2D(2, (3, 3), strides=(2, 2), use_bias=False, name="block1_conv1")(
            img_input
        )
        x = BatchNormalization(name="block1_conv1_bn")(x)
        x = Activation("relu", name="block1_conv1_act")(x)
        x = Conv2D(4, (3, 3), use_bias=False, name="block1_conv2")(x)
        x = BatchNormalization(name="block1_conv2_bn")(x)
        x = Activation("relu", name="block1_conv2_act")(x)
        residual = Conv2D(8, (1, 1), strides=(2, 2), padding="same", use_bias=False)(x)
        residual = BatchNormalization()(residual)
        x = SeparableConv2D(
            8, (3, 3), padding="same", use_bias=False, name="block2_sepconv1"
        )(x)
        x = BatchNormalization(name="block2_sepconv1_bn")(x)
        x = Activation("relu", name="block2_sepconv2_act")(x)
        x = SeparableConv2D(
            8, (3, 3), padding="same", use_bias=False, name="block2_sepconv2"
        )(x)
        x = BatchNormalization(name="block2_sepconv2_bn")(x)
        x = MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="block2_pool")(x)
        x = add([x, residual])
        residual = Conv2D(16, (1, 1), strides=(2, 2), padding="same", use_bias=False)(x)
        residual = BatchNormalization()(residual)
        model = Model(inputs=[img_input], outputs=[residual])
        self._test_model(model, delta=1e-2, model_precision=model_precision)

    def test_tiny_xception_half_precision(self):
        return self.test_tiny_xception(model_precision=_MLMODEL_HALF_PRECISION)

    def test_nested_model_giving_output(self):
        """A Sequential model whose output is produced by a nested submodel."""
        base_model = Sequential()
        base_model.add(Conv2D(32, (1, 1), input_shape=(4, 4, 3)))
        top_model = Sequential()
        top_model.add(Flatten(input_shape=base_model.output_shape[1:]))
        top_model.add(Dense(16, activation="relu"))
        top_model.add(Dense(1, activation="sigmoid"))
        model = Model(inputs=base_model.input, outputs=top_model(base_model.output))
        self._test_model(model)

    # similar to issue 269
    def test_time_distributed_conv(self):
        """TimeDistributed conv/pool stack feeding an LSTM (regression #269)."""
        model = Sequential()
        model.add(
            TimeDistributed(
                Conv2D(64, (3, 3), activation="relu"), input_shape=(1, 30, 30, 3)
            )
        )
        model.add(TimeDistributed(MaxPooling2D((2, 2), strides=(1, 1))))
        model.add(TimeDistributed(Conv2D(32, (4, 4), activation="relu")))
        model.add(TimeDistributed(MaxPooling2D((2, 2), strides=(2, 2))))
        model.add(TimeDistributed(Conv2D(32, (4, 4), activation="relu")))
        model.add(TimeDistributed(MaxPooling2D((2, 2), strides=(2, 2))))
        model.add(TimeDistributed(Flatten()))
        model.add(Dropout(0.5))
        model.add(LSTM(32, return_sequences=False, dropout=0.5))
        model.add(Dense(10, activation="sigmoid"))
        self._test_model(model)
@pytest.mark.slow
@pytest.mark.keras2
@unittest.skipIf(not _HAS_KERAS2_TF, "Missing keras. Skipping tests.")
class KerasNumericCorrectnessStressTest(KerasNumericCorrectnessTest):
    """
    Unit test class for testing all combinations of a particular
    layer.
    """

    def _run_test(
        self,
        model,
        param,
        model_dir=None,
        delta=1e-2,
        transpose_keras_result=True,
        one_dim_seq_flags=None,
        model_precision=_MLMODEL_FULL_PRECISION,
    ):
        """ Run a test on a particular model

        Converts ``model`` to Core ML, runs both on random input and
        compares the (normalized) predictions element-wise.  ``param`` is
        only used to label failure messages for the parameter combination
        under test.
        """
        use_tmp_folder = False
        if model_dir is None:
            use_tmp_folder = True
            model_dir = tempfile.mkdtemp()
        model_path = os.path.join(model_dir, "keras.mlmodel")
        # Generate some random data
        nb_inputs = len(model.inputs)
        if nb_inputs > 1:
            input_names = []
            input_data = []
            coreml_input = {}
            for i in range(nb_inputs):
                # Replace unknown (None) dimensions with 1.
                input_shape = [1 if a is None else a for a in model.input_shape[i]]
                X = _generate_data(input_shape)
                feature_name = "data_%s" % i
                input_names.append(feature_name)
                input_data.append(X)
                if one_dim_seq_flags is None:
                    coreml_input[feature_name] = _keras_transpose(X).astype("f")
                else:
                    coreml_input[feature_name] = _keras_transpose(
                        X, one_dim_seq_flags[i]
                    ).astype("f")
        else:
            input_shape = [1 if a is None else a for a in model.input_shape]
            input_names = ["data"]
            input_data = _generate_data(input_shape)
            if one_dim_seq_flags is None:
                coreml_input = {"data": _keras_transpose(input_data).astype("f")}
            else:
                coreml_input = {
                    "data": _keras_transpose(input_data, one_dim_seq_flags[0]).astype(
                        "f"
                    )
                }
        # Make predictions
        if transpose_keras_result:
            keras_preds = _keras_transpose(model.predict(input_data)).flatten()
        else:
            keras_preds = model.predict(input_data).flatten()
        # Get the model
        coreml_model = _get_coreml_model(
            model, input_names, ["output"], model_precision=model_precision
        )
        # Core ML prediction is only available on macOS 10.13+.
        if _is_macos() and _macos_version() >= (10, 13):
            # get prediction
            coreml_preds = coreml_model.predict(coreml_input)["output"].flatten()
            if use_tmp_folder:
                shutil.rmtree(model_dir)
            self.assertEqual(
                len(coreml_preds),
                len(keras_preds),
                msg="Failed test case %s. Lengths wrong (%s vs %s)"
                % (param, len(coreml_preds), len(keras_preds)),
            )
            for i in range(len(keras_preds)):
                # Normalize by the larger of the two values (floor 1.0) so
                # delta acts as a relative tolerance for large magnitudes.
                max_den = max(1.0, keras_preds[i], coreml_preds[i])
                self.assertAlmostEqual(
                    keras_preds[i] / max_den,
                    coreml_preds[i] / max_den,
                    delta=delta,
                    msg="Failed test case %s. Predictions wrong (%s vs %s)"
                    % (param, coreml_preds[i], keras_preds[i]),
                )

    @pytest.mark.slow
    def test_activation_layer_params(self):
        """Sweep every supported activation on a Dense layer."""
        options = dict(
            activation=[
                "tanh",
                "relu",
                "sigmoid",
                "softmax",
                "softplus",
                "softsign",
                "hard_sigmoid",
                "elu",
            ]
        )
        # Define a function that tests a model
        num_channels = 10
        input_dim = 10

        def build_model(x):
            model = Sequential()
            model.add(Dense(num_channels, input_dim=input_dim))
            model.add(Activation(**dict(zip(options.keys(), x))))
            return x, model

        # Iterate through all combinations
        product = itertools.product(*options.values())
        args = [build_model(p) for p in product]
        # Test the cases
        print("Testing a total of %s cases. This could take a while" % len(args))
        for param, model in args:
            model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
            self._run_test(model, param)

    @pytest.mark.slow
    def test_dense_layer_params(self):
        """Sweep activation x use_bias combinations for Dense."""
        options = dict(
            activation=[
                "relu",
                "softmax",
                "tanh",
                "sigmoid",
                "softplus",
                "softsign",
                "elu",
                "hard_sigmoid",
            ],
            use_bias=[True, False],
        )
        # Define a function that tests a model
        input_shape = (10,)
        num_channels = 10

        def build_model(x):
            kwargs = dict(zip(options.keys(), x))
            model = Sequential()
            model.add(Dense(num_channels, input_shape=input_shape, **kwargs))
            return x, model

        # Iterate through all combinations
        product = itertools.product(*options.values())
        args = [build_model(p) for p in product]
        # Test the cases
        print("Testing a total of %s cases. This could take a while" % len(args))
        for param, model in args:
            self._run_test(model, param)

    @pytest.mark.slow
    def test_upsample_layer_params(self):
        """Sweep UpSampling2D sizes after a Conv2D."""
        options = dict(size=[(2, 2), (3, 3), (4, 4), (5, 5)])
        np.random.seed(1988)
        input_dim = 10
        input_shape = (input_dim, input_dim, 1)
        X = np.random.rand(1, *input_shape)

        # Define a function that tests a model
        def build_model(x):
            kwargs = dict(zip(options.keys(), x))
            model = Sequential()
            model.add(Conv2D(filters=5, kernel_size=(7, 7), input_shape=input_shape))
            model.add(UpSampling2D(**kwargs))
            return x, model

        # Iterate through all combinations
        product = itertools.product(*options.values())
        args = [build_model(p) for p in product]
        # Test the cases
        print("Testing a total of %s cases. This could take a while" % len(args))
        for param, model in args:
            self._run_test(model, param)

    @pytest.mark.slow
    def test_conv_layer_params(self, model_precision=_MLMODEL_FULL_PRECISION):
        """Sweep Conv2D parameter combinations."""
        options = dict(
            activation=[
                "relu",
                "tanh",
                "sigmoid",
            ],  # keras does not support softmax on 4-D
            use_bias=[True, False],
            padding=["same", "valid"],
            filters=[1, 3, 5],
            kernel_size=[[5, 5]],  # fails when sizes are different
        )
        # Define a function that tests a model
        input_shape = (10, 10, 1)

        def build_model(x):
            kwargs = dict(zip(options.keys(), x))
            model = Sequential()
            model.add(Conv2D(input_shape=input_shape, **kwargs))
            return x, model

        # Iterate through all combinations
        product = itertools.product(*options.values())
        args = [build_model(p) for p in product]
        # Test the cases
        print("Testing a total of %s cases. This could take a while" % len(args))
        for param, model in args:
            self._run_test(model, param, model_precision=model_precision)

    @pytest.mark.keras2
    def test_conv_layer_params_half_precision(self):
        return self.test_conv_layer_params(model_precision=_MLMODEL_HALF_PRECISION)

    @pytest.mark.slow
    def test_dense_elementwise_params(self):
        """Sweep merge modes (add/multiply/concat/average/maximum)."""
        options = dict(modes=[add, multiply, concatenate, average, maximum])

        def build_model(mode):
            x1 = Input(shape=(3,))
            x2 = Input(shape=(3,))
            y1 = Dense(4)(x1)
            y2 = Dense(4)(x2)
            z = mode([y1, y2])
            model = Model([x1, x2], z)
            return mode, model

        product = itertools.product(*options.values())
        args = [build_model(p[0]) for p in product]
        print("Testing a total of %s cases. This could take a while" % len(args))
        for param, model in args:
            self._run_test(model, param)

    def test_vgg_16_tiny(self):
        """VGG-16-shaped network with tiny channel counts."""
        input_shape = (48, 48, 3)
        model = Sequential()
        model.add(ZeroPadding2D((1, 1), input_shape=input_shape))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(MaxPooling2D((2, 2), strides=(2, 2)))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(MaxPooling2D((2, 2), strides=(2, 2)))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(MaxPooling2D((2, 2), strides=(2, 2)))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(MaxPooling2D((2, 2), strides=(2, 2)))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(MaxPooling2D((2, 2), strides=(2, 2)))
        model.add(Flatten())
        model.add(Dense(32, activation="relu"))
        model.add(Dropout(0.5))
        model.add(Dense(32, activation="relu"))
        model.add(Dropout(0.5))
        model.add(Dense(1000))  # activation='softmax'))
        # Set some random weights
        model.set_weights(
            [(np.random.rand(*w.shape) - 0.5) * 0.2 for w in model.get_weights()]
        )
        # Get the coreml model
        self._test_model(model)

    def test_vgg_16_tiny_no_pooling(self):
        """VGG-16-shaped network without the dropout layers."""
        input_shape = (48, 48, 3)
        model = Sequential()
        model.add(ZeroPadding2D((1, 1), input_shape=input_shape))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(MaxPooling2D((2, 2), strides=(2, 2)))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(MaxPooling2D((2, 2), strides=(2, 2)))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(MaxPooling2D((2, 2), strides=(2, 2)))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(MaxPooling2D((2, 2), strides=(2, 2)))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(ZeroPadding2D((1, 1)))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(MaxPooling2D((2, 2), strides=(2, 2)))
        model.add(Flatten())
        model.add(Dense(32, activation="relu"))
        # model.add(Dropout(0.5))
        model.add(Dense(32, activation="relu"))
        # model.add(Dropout(0.5))
        model.add(Dense(1000))  # activation='softmax'))
        # Set some random weights
        model.set_weights(
            [(np.random.rand(*w.shape) - 0.5) * 0.2 for w in model.get_weights()]
        )
        # Get the coreml model
        self._test_model(model)

    def test_vgg_16_tiny_no_pooling_no_padding(
        self, model_precision=_MLMODEL_FULL_PRECISION
    ):
        """Pure conv stack with no pooling or padding layers."""
        input_shape = (48, 48, 3)
        model = Sequential()
        model.add(Conv2D(32, (3, 3), activation="relu", input_shape=input_shape))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(Conv2D(32, (3, 3), activation="relu"))
        model.add(Flatten())
        model.add(Dense(32, activation="relu"))
        model.add(Dropout(0.5))
        model.add(Dense(32, activation="relu"))
        model.add(Dropout(0.5))
        model.add(Dense(1000, activation="softmax"))
        # Get the coreml model
        self._test_model(model, model_precision=model_precision)

    def test_vgg_16_tiny_no_pooling_no_padding_half_precision(self):
        return self.test_vgg_16_tiny_no_pooling_no_padding(
            model_precision=_MLMODEL_HALF_PRECISION
        )

    def test_imdb_fasttext_first_2(self):
        """First two layers of the IMDB fastText example."""
        max_features = 10
        max_len = 6
        embedding_dims = 4
        pool_length = 2
        model = Sequential()
        model.add(Embedding(max_features, embedding_dims, input_length=max_len))
        # we add a AveragePooling1D, which will average the embeddings
        # of all words in the document
        model.add(AveragePooling1D(pool_size=pool_length))
        self._test_model(model, one_dim_seq_flags=[True])

    def test_tiny_mcrnn_td(self):
        model = Sequential()
        model.add(Conv2D(3, (1, 1), input_shape=(2, 4, 4), padding="same"))
        model.add(AveragePooling2D(pool_size=(2, 2)))
        model.add(Reshape((2, 3)))
        model.add(TimeDistributed(Dense(5)))
        self._test_model(model)

    def test_tiny_mcrnn_recurrent(self):
        model = Sequential()
        model.add(Conv2D(3, (1, 1), input_shape=(2, 4, 4), padding="same"))
        model.add(AveragePooling2D(pool_size=(2, 2)))
        model.add(Reshape((2, 3)))
        model.add(LSTM(5, recurrent_activation="sigmoid"))
        self._test_model(model)

    def test_tiny_mcrnn_music_tagger(self):
        """Miniature music-tagger CRNN: conv blocks feeding stacked GRUs."""
        x_in = Input(shape=(4, 6, 1))
        x = ZeroPadding2D(padding=(0, 1))(x_in)
        x = BatchNormalization(axis=2, name="bn_0_freq")(x)
        # Conv block 1
        x = Conv2D(2, (3, 3), padding="same", name="conv1")(x)
        x = BatchNormalization(axis=3, name="bn1")(x)
        x = Activation("elu")(x)
        x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), name="pool1")(x)
        # Conv block 2
        x = Conv2D(4, (3, 3), padding="same", name="conv2")(x)
        x = BatchNormalization(axis=3, name="bn2")(x)
        x = Activation("elu")(x)
        x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), name="pool2")(x)
        # Should get you (1,1,2,4)
        x = Reshape((2, 4))(x)
        x = GRU(32, return_sequences=True, name="gru1")(x)
        x = GRU(32, return_sequences=False, name="gru2")(x)
        # Create model.
        model = Model(x_in, x)
        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
        self._test_model(model, mode="random_zero_mean", delta=1e-2)

    def test_tiny_apple_manual(self):
        model = Sequential()
        model.add(LSTM(3, input_shape=(4, 5), recurrent_activation="sigmoid"))
        model.add(Dense(5))
        model.add(Activation("softmax"))
        self._test_model(model)

    def test_tiny_image_captioning_image_branch(self):
        img_input_1 = Input(shape=(16, 16, 3))
        x = Conv2D(2, (3, 3))(img_input_1)
        x = Flatten()(x)
        img_model = Model(inputs=[img_input_1], outputs=[x])
        img_input = Input(shape=(16, 16, 3))
        x = img_model(img_input)
        x = Dense(8, name="cap_dense")(x)
        x = Reshape((1, 8), name="cap_reshape")(x)
        image_branch = Model(inputs=[img_input], outputs=[x])
        self._test_model(image_branch)

    def test_tiny_image_captioning_feature_merge(self):
        img_input_1 = Input(shape=(16, 16, 3))
        x = Conv2D(2, (3, 3))(img_input_1)
        x = Flatten()(x)
        img_model = Model([img_input_1], [x])
        img_input = Input(shape=(16, 16, 3))
        x = img_model(img_input)
        x = Dense(8, name="cap_dense")(x)
        x = Reshape((1, 8), name="cap_reshape")(x)
        sentence_input = Input(shape=(5,))  # max_length = 5
        y = Embedding(8, 8, name="cap_embedding")(sentence_input)
        z = concatenate([x, y], axis=1, name="cap_merge")
        combined_model = Model(inputs=[img_input, sentence_input], outputs=[z])
        self._test_model(combined_model, one_dim_seq_flags=[False, True])

    def test_tiny_image_captioning(self):
        # use a conv layer as a image feature branch
        img_input_1 = Input(shape=(16, 16, 3))
        x = Conv2D(2, (3, 3))(img_input_1)
        x = Flatten()(x)
        img_model = Model(inputs=[img_input_1], outputs=[x])
        img_input = Input(shape=(16, 16, 3))
        x = img_model(img_input)
        x = Dense(8, name="cap_dense")(x)
        x = Reshape((1, 8), name="cap_reshape")(x)
        sentence_input = Input(shape=(5,))  # max_length = 5
        y = Embedding(8, 8, name="cap_embedding")(sentence_input)
        z = concatenate([x, y], axis=1, name="cap_merge")
        z = LSTM(4, return_sequences=True, name="cap_lstm")(z)
        z = TimeDistributed(Dense(8), name="cap_timedistributed")(z)
        combined_model = Model(inputs=[img_input, sentence_input], outputs=[z])
        self._test_model(combined_model, one_dim_seq_flags=[False, True])

    def test_tiny_babi_rnn(self):
        """Miniature bAbI memory-network-style model with two inputs."""
        vocab_size = 10
        embed_hidden_size = 8
        story_maxlen = 5
        query_maxlen = 5
        input_tensor_1 = Input(shape=(story_maxlen,))
        x1 = Embedding(vocab_size, embed_hidden_size)(input_tensor_1)
        x1 = Dropout(0.3)(x1)
        input_tensor_2 = Input(shape=(query_maxlen,))
        x2 = Embedding(vocab_size, embed_hidden_size)(input_tensor_2)
        x2 = Dropout(0.3)(x2)
        x2 = LSTM(embed_hidden_size, return_sequences=False)(x2)
        x2 = RepeatVector(story_maxlen)(x2)
        x3 = add([x1, x2])
        x3 = LSTM(embed_hidden_size, return_sequences=False)(x3)
        x3 = Dropout(0.3)(x3)
        x3 = Dense(vocab_size, activation="softmax")(x3)
        model = Model(inputs=[input_tensor_1, input_tensor_2], outputs=[x3])
        self._test_model(model, one_dim_seq_flags=[True, True])

    def test_clickbait_cnn(self, model_precision=_MLMODEL_FULL_PRECISION):
        # from: https://github.com/saurabhmathur96/clickbait-detector
        vocabulary_size = 500
        embedding_dimension = 30
        input_length = 20
        model = Sequential()
        model.add(
            Embedding(
                vocabulary_size,
                embedding_dimension,
                input_length=input_length,
                trainable=True,
            )
        )
        model.add(Conv1D(32, 2))
        model.add(BatchNormalization())
        model.add(Activation("relu"))
        model.add(Conv1D(32, 2))
        model.add(BatchNormalization())
        model.add(Activation("relu"))
        model.add(Conv1D(32, 2))
        model.add(BatchNormalization())
        model.add(Activation("relu"))
        model.add(MaxPooling1D(17))
        model.add(Flatten())
        model.add(Dense(1, use_bias=True))
        model.add(BatchNormalization())
        model.add(Activation("sigmoid"))
        self._test_model(
            model, one_dim_seq_flags=[True], model_precision=model_precision
        )

    def test_clickbait_cnn_half_precision(self):
        return self.test_clickbait_cnn(model_precision=_MLMODEL_HALF_PRECISION)

    def test_model_with_duplicated_edges(self):
        """Graph where one tensor feeds two consumers that later re-merge."""
        # Create a simple model
        inputs = Input(shape=(20, 20))
        activation = Activation("relu")(inputs)
        cropping = Cropping1D(cropping=(1, 1))(activation)
        conv1d = Conv1D(20, 3, padding="valid")(activation)
        # (fixed local-variable typo: was 'ouputs')
        outputs = Add()([conv1d, cropping])
        model = Model(inputs, outputs)
        self._test_model(model)
@unittest.skipIf(not _HAS_KERAS2_TF, "Missing keras. Skipping tests.")
@pytest.mark.keras2
class KerasBasicConversionTest(KerasNumericCorrectnessTest):
    """Tests for converter flags rather than numeric correctness."""

    def test_float_arraytype_flag(self):
        """use_float_arraytype=True must produce FLOAT32 multi-array I/O."""
        np.random.seed(1988)
        # Define a model
        model = Sequential()
        model.add(Dense(1000, input_shape=(100,)))
        # Set some random weights
        model.set_weights([np.random.rand(*w.shape) for w in model.get_weights()])
        # Convert model
        from coremltools.converters import keras as keras_converter

        coreml_model = keras_converter.convert(model, use_float_arraytype=True)
        spec = coreml_model.get_spec()
        from coremltools.proto import Model_pb2 as _Model_pb2

        # Both the input and output feature descriptions should be FLOAT32.
        self.assertEqual(
            spec.description.input[0].type.multiArrayType.dataType,
            _Model_pb2.ArrayFeatureType.FLOAT32,
        )
        self.assertEqual(
            spec.description.output[0].type.multiArrayType.dataType,
            _Model_pb2.ArrayFeatureType.FLOAT32,
        )
if __name__ == "__main__":
    # Run the full test suite; removed stale commented-out single-test
    # runner code (dead code kept the file noisy).
    unittest.main()
| 32.039375
| 88
| 0.580306
| 13,538
| 110,664
| 4.492835
| 0.041439
| 0.036696
| 0.03035
| 0.041135
| 0.83444
| 0.794653
| 0.765027
| 0.733937
| 0.708914
| 0.683891
| 0
| 0.033267
| 0.314131
| 110,664
| 3,453
| 89
| 32.048653
| 0.768093
| 0.066038
| 0
| 0.646184
| 0
| 0
| 0.025091
| 0.00101
| 0
| 0
| 0
| 0
| 0.002348
| 1
| 0.074755
| false
| 0.000391
| 0.01409
| 0.009785
| 0.108023
| 0.001957
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
71f77ee43d604bb608828b2a5a5a09843abe2d46
| 417
|
py
|
Python
|
savoten/domain/__init__.py
|
sato-mh/savoten
|
ef8edf842219480777f5872e65aedadc67d9dfd2
|
[
"MIT"
] | null | null | null |
savoten/domain/__init__.py
|
sato-mh/savoten
|
ef8edf842219480777f5872e65aedadc67d9dfd2
|
[
"MIT"
] | 57
|
2018-04-30T05:59:43.000Z
|
2019-12-08T12:16:35.000Z
|
savoten/domain/__init__.py
|
sato-mh/savoten
|
ef8edf842219480777f5872e65aedadc67d9dfd2
|
[
"MIT"
] | 1
|
2019-11-03T15:11:05.000Z
|
2019-11-03T15:11:05.000Z
|
from .candidate import Candidate
from .candidate_repository_interface import CandidateRepositoryInterface
from .event import Event
from .event_item import EventItem
from .event_item_repository_interface import EventItemRepositoryInterface
from .event_repository_interface import EventRepositoryInterface
from .period import Period
from .user import User
from .user_repository_interface import UserRepositoryInterface
| 41.7
| 73
| 0.892086
| 46
| 417
| 7.869565
| 0.304348
| 0.209945
| 0.276243
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086331
| 417
| 9
| 74
| 46.333333
| 0.950131
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9c299f4ce87b04054e6b5d651efcfbc2acb18b82
| 157
|
py
|
Python
|
problems/data-encoding/asc-and-ii-shall-receive/challenge.py
|
syclops/ctflab
|
02dd9b28a1b918b033ace40a53848951bbf5fdcd
|
[
"MIT"
] | null | null | null |
problems/data-encoding/asc-and-ii-shall-receive/challenge.py
|
syclops/ctflab
|
02dd9b28a1b918b033ace40a53848951bbf5fdcd
|
[
"MIT"
] | null | null | null |
problems/data-encoding/asc-and-ii-shall-receive/challenge.py
|
syclops/ctflab
|
02dd9b28a1b918b033ace40a53848951bbf5fdcd
|
[
"MIT"
] | null | null | null |
from hacksport.problem import Challenge
class Problem(Challenge):
def generate_flag(self, _):
return "plz"
def setup(self):
pass
| 14.272727
| 39
| 0.649682
| 18
| 157
| 5.555556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.267516
| 157
| 10
| 40
| 15.7
| 0.869565
| 0
| 0
| 0
| 1
| 0
| 0.019231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
9c361d501cac35a93ef99f0238542d4dac2b7c12
| 81
|
py
|
Python
|
tests/test_import.py
|
SEIRS-Plus/v2
|
3adc155400deaa4093e523ae81d2a25989888654
|
[
"MIT"
] | 1
|
2022-03-04T08:05:58.000Z
|
2022-03-04T08:05:58.000Z
|
tests/test_import.py
|
SEIRS-Plus/v2
|
3adc155400deaa4093e523ae81d2a25989888654
|
[
"MIT"
] | null | null | null |
tests/test_import.py
|
SEIRS-Plus/v2
|
3adc155400deaa4093e523ae81d2a25989888654
|
[
"MIT"
] | null | null | null |
def test_import():
from seirsplus.utils import discover
discover.models
| 16.2
| 40
| 0.740741
| 10
| 81
| 5.9
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.197531
| 81
| 4
| 41
| 20.25
| 0.907692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.666667
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9c3c711ab583433e50ede9a7f1bcb6d4efd7565b
| 300
|
py
|
Python
|
ramda/starts_with_test.py
|
jakobkolb/ramda.py
|
982b2172f4bb95b9a5b09eff8077362d6f2f0920
|
[
"MIT"
] | 56
|
2018-08-06T08:44:58.000Z
|
2022-03-17T09:49:03.000Z
|
ramda/starts_with_test.py
|
jakobkolb/ramda.py
|
982b2172f4bb95b9a5b09eff8077362d6f2f0920
|
[
"MIT"
] | 28
|
2019-06-17T11:09:52.000Z
|
2022-02-18T16:59:21.000Z
|
ramda/starts_with_test.py
|
jakobkolb/ramda.py
|
982b2172f4bb95b9a5b09eff8077362d6f2f0920
|
[
"MIT"
] | 5
|
2019-09-18T09:24:38.000Z
|
2021-07-21T08:40:23.000Z
|
from ramda import *
from ramda.private.asserts import *
def starts_with_test():
    """starts_with detects prefixes of both strings and lists."""
    cases = [
        ("a", "abc", True),
        ("b", "abc", False),
        (["a"], ["a", "b", "c"], True),
        (["b"], ["a", "b", "c"], False),
    ]
    for prefix, sequence, expected in cases:
        assert_equal(starts_with(prefix, sequence), expected)
| 30
| 60
| 0.63
| 44
| 300
| 4.068182
| 0.386364
| 0.27933
| 0.379888
| 0.469274
| 0.536313
| 0.290503
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146667
| 300
| 9
| 61
| 33.333333
| 0.699219
| 0
| 0
| 0
| 0
| 0
| 0.053333
| 0
| 0
| 0
| 0
| 0
| 0.714286
| 1
| 0.142857
| true
| 0
| 0.285714
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
92d587908bba0b0cb0980528b903eaaa34e3d4eb
| 103
|
py
|
Python
|
gsfarc/gptool/parameter/templates/long.py
|
geospatial-services-framework/gsfpyarc
|
5ef69299fbc0b763ad4c1857ceac3ff087c0dc14
|
[
"MIT"
] | 1
|
2021-11-06T18:36:28.000Z
|
2021-11-06T18:36:28.000Z
|
gsfarc/gptool/parameter/templates/long.py
|
geospatial-services-framework/gsfpyarc
|
5ef69299fbc0b763ad4c1857ceac3ff087c0dc14
|
[
"MIT"
] | null | null | null |
gsfarc/gptool/parameter/templates/long.py
|
geospatial-services-framework/gsfpyarc
|
5ef69299fbc0b763ad4c1857ceac3ff087c0dc14
|
[
"MIT"
] | null | null | null |
"""
"""
from .basic import BASIC
class LONG(BASIC):
    """Parameter template class; adds no behavior beyond BASIC."""
    pass
def template():
    """Build and return the default LONG parameter template ('GPLong')."""
    instance = LONG('GPLong')
    return instance
| 9.363636
| 25
| 0.631068
| 13
| 103
| 5
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.203884
| 103
| 11
| 25
| 9.363636
| 0.792683
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
1314f3e2d0ef5d7966fcd1c8ba979df4f0a0c680
| 212
|
py
|
Python
|
app/controller/utils.py
|
thiaghenr/pokemon-api
|
f915b2db0a73133dd13b0529ec25fdc602334829
|
[
"MIT"
] | 3
|
2021-04-28T14:37:22.000Z
|
2022-01-20T20:16:57.000Z
|
app/controller/utils.py
|
thiaghenr/pokemon-api
|
f915b2db0a73133dd13b0529ec25fdc602334829
|
[
"MIT"
] | null | null | null |
app/controller/utils.py
|
thiaghenr/pokemon-api
|
f915b2db0a73133dd13b0529ec25fdc602334829
|
[
"MIT"
] | null | null | null |
from routers.pokemon_type import get_pokemon_type
def get_pokemon_types(pokemon_association, db):
    """Resolve every association entry to its pokemon type via `get_pokemon_type`.

    Returns a list with one resolved type per association entry, in order.
    """
    resolved = []
    for association in pokemon_association:
        resolved.append(get_pokemon_type(association.pokemontype_id, db))
    return resolved
| 35.333333
| 57
| 0.745283
| 30
| 212
| 4.866667
| 0.533333
| 0.226027
| 0.191781
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.20283
| 212
| 6
| 57
| 35.333333
| 0.863905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
1352630081cb44e846f8c1bdc31ca473cb5899f4
| 5,240
|
py
|
Python
|
fooof/sim/transform.py
|
varman-m/fooof
|
6046c89bb3c87f30a8a368809a9d321c8c33e1a8
|
[
"Apache-2.0"
] | null | null | null |
fooof/sim/transform.py
|
varman-m/fooof
|
6046c89bb3c87f30a8a368809a9d321c8c33e1a8
|
[
"Apache-2.0"
] | null | null | null |
fooof/sim/transform.py
|
varman-m/fooof
|
6046c89bb3c87f30a8a368809a9d321c8c33e1a8
|
[
"Apache-2.0"
] | null | null | null |
"""Functions and utilities for transforming power spectra."""
import numpy as np
from fooof.sim.params import update_sim_ap_params
###################################################################################################
###################################################################################################
def rotate_spectrum(freqs, power_spectrum, delta_exponent, f_rotation):
    """Rotate a power spectrum about a frequency point, changing the power law exponent.

    Parameters
    ----------
    freqs : 1d array
        Frequency axis of input power spectrum, in Hz.
    power_spectrum : 1d array
        Power values of the spectrum that is to be rotated.
    delta_exponent : float
        Change in aperiodic exponent to be applied.
        Positive is clockwise rotation (steepen).
        Negative is counterclockwise rotation (flatten).
    f_rotation : float
        Frequency value, in Hz, about which rotation is applied, at which power is unchanged.

    Returns
    -------
    rotated_spectrum : 1d array
        Rotated power spectrum.

    Notes
    -----
    Warning: This function should only be applied to spectra without a knee.
    If using simulated data, this is spectra created in 'fixed' mode.
    This is because the rotation applied is inconsistent with the
    formulation of knee spectra, and will change them in an
    unspecified way, not just limited to doing the rotation.
    """
    # Per-frequency multiplier: equals 1 at f_rotation, tilting the spectrum
    # around that point by delta_exponent.
    scaling = (np.abs(freqs) / f_rotation) ** -delta_exponent
    return scaling * power_spectrum
def translate_spectrum(power_spectrum, delta_offset):
    """Translate a spectrum, changing the offset value.

    Parameters
    ----------
    power_spectrum : 1d array
        Power values of the spectrum that is to be translated.
    delta_offset : float
        Amount to change the offset by.
        Positive is an upwards translation.
        Negative is a downwards translation.

    Returns
    -------
    translated_spectrum : 1d array
        Translated power spectrum.
    """
    # A log-power offset change of delta_offset is a multiplicative
    # factor of 10**delta_offset in linear power.
    scale = np.power(10, delta_offset, dtype='float')
    return scale * power_spectrum
def rotate_sim_spectrum(freqs, power_spectrum, delta_exponent, f_rotation, sim_params):
    """Rotate a simulated power spectrum, updating that SimParams object.

    Parameters
    ----------
    freqs : 1d array
        Frequency axis of input power spectrum, in Hz.
    power_spectrum : 1d array
        Power values of the spectrum that is to be rotated.
    delta_exponent : float
        Change in aperiodic exponent to be applied.
        Positive is clockwise rotation (steepen).
        Negative is counterclockwise rotation (flatten).
    f_rotation : float
        Frequency value, in Hz, about which rotation is applied, at which power is unchanged.
    sim_params : SimParams object
        Object storing the current parameter definitions.

    Returns
    -------
    rotated_spectrum : 1d array
        Rotated power spectrum.
    new_sim_params : SimParams object
        Updated object storing the new parameter definitions.

    Notes
    -----
    Warning: This function should only be applied to spectra without a knee.
    If using simulated data, this is spectra created in 'fixed' mode.
    This is because the rotation applied is inconsistent with the
    formulation of knee spectra, and will change them in an
    unspecified way, not just limited to doing the rotation.
    """
    # Rotating about f_rotation also implies a change in offset; compute it
    # so the stored simulation parameters stay consistent with the data.
    offset_change = compute_rotation_offset(delta_exponent, f_rotation)
    spectrum_out = rotate_spectrum(freqs, power_spectrum, delta_exponent, f_rotation)
    params_out = update_sim_ap_params(sim_params, [offset_change, delta_exponent])
    return spectrum_out, params_out
def translate_sim_spectrum(power_spectrum, delta_offset, sim_params):
    """Translate a simulated spectrum, updating that SimParams object.

    Parameters
    ----------
    power_spectrum : 1d array
        Power values of the spectrum that is to be translated.
    delta_offset : float
        Amount to change the offset by.
        Positive is an upwards translation.
        Negative is a downwards translation.
    sim_params : SimParams object
        Object storing the current parameter definitions.

    Returns
    -------
    translated_spectrum : 1d array
        Translated power spectrum.
    new_sim_params : SimParams object
        Updated object storing the new parameter definitions.
    """
    shifted = translate_spectrum(power_spectrum, delta_offset)
    # Record the offset change in the simulation parameter object as well.
    updated_params = update_sim_ap_params(sim_params, delta_offset, 'offset')
    return shifted, updated_params
def compute_rotation_offset(delta_exponent, f_rotation):
    """Calculate the change in offset from a given rotation.

    Parameters
    ----------
    delta_exponent : float
        Change in aperiodic exponent to be applied.
    f_rotation : float
        Frequency value, in Hz, about which rotation is applied, at which power is unchanged.

    Returns
    -------
    float
        The amount the offset will change for the specified exponent change.
    """
    # Equivalent to -log10(f_rotation) * -delta_exponent: the two sign
    # flips cancel exactly in floating point.
    return np.log10(f_rotation) * delta_exponent
| 33.589744
| 99
| 0.677099
| 635
| 5,240
| 5.440945
| 0.182677
| 0.075253
| 0.034732
| 0.031838
| 0.792764
| 0.770188
| 0.726194
| 0.701302
| 0.64081
| 0.609551
| 0
| 0.003446
| 0.224618
| 5,240
| 155
| 100
| 33.806452
| 0.846911
| 0.646183
| 0
| 0
| 0
| 0
| 0.008814
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.1
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
b92d82977d1627b5cb97ead28b37c02fb262767a
| 7,182
|
py
|
Python
|
tests/test_update_values.py
|
LSSTDESC/healsparse
|
f6b15f570ab6335328e34006f69c3919d9fcf1c8
|
[
"BSD-3-Clause"
] | 8
|
2019-05-06T11:42:41.000Z
|
2021-10-08T14:57:12.000Z
|
tests/test_update_values.py
|
LSSTDESC/healsparse
|
f6b15f570ab6335328e34006f69c3919d9fcf1c8
|
[
"BSD-3-Clause"
] | 75
|
2019-03-01T23:25:26.000Z
|
2022-01-29T21:40:27.000Z
|
tests/test_update_values.py
|
LSSTDESC/healsparse
|
f6b15f570ab6335328e34006f69c3919d9fcf1c8
|
[
"BSD-3-Clause"
] | 3
|
2020-01-30T19:10:19.000Z
|
2022-03-08T14:57:38.000Z
|
from __future__ import division, absolute_import, print_function
import unittest
import numpy.testing as testing
import numpy as np
import healpy as hp
import healsparse
class UpdateValuesTestCase(unittest.TestCase):
    """Tests for HealSparseMap.update_values_pix / update_values_pos."""

    def test_update_values_inorder(self):
        """
        Test doing update_values, in coarse pixel order.
        """
        nside_coverage = 32
        nside_map = 64
        dtype = np.float64
        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
        # Number of fine pixels per coverage pixel.
        nfine_per_cov = 2**sparse_map._cov_map.bit_shift
        # Fill coverage pixel 10 first, then 16 (increasing order).
        test_pix = np.arange(nfine_per_cov) + nfine_per_cov * 10
        test_values = np.zeros(nfine_per_cov)
        sparse_map.update_values_pix(test_pix, test_values)
        testing.assert_almost_equal(sparse_map.get_values_pix(test_pix), test_values)
        valid_pixels = sparse_map.valid_pixels
        testing.assert_equal(valid_pixels, test_pix)
        test_pix2 = np.arange(nfine_per_cov) + nfine_per_cov * 16
        test_values2 = np.zeros(nfine_per_cov) + 100
        sparse_map.update_values_pix(test_pix2, test_values2)
        # The first batch must be untouched by the second update.
        testing.assert_almost_equal(sparse_map.get_values_pix(test_pix), test_values)
        testing.assert_almost_equal(sparse_map.get_values_pix(test_pix2), test_values2)
        valid_pixels = sparse_map.valid_pixels
        testing.assert_equal(np.sort(valid_pixels), np.sort(np.concatenate((test_pix, test_pix2))))

    def test_update_values_outoforder(self):
        """
        Test doing updateValues, out of order.
        """
        nside_coverage = 32
        nside_map = 64
        dtype = np.float64
        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
        nfine_per_cov = 2**sparse_map._cov_map.bit_shift
        # Fill coverage pixel 16 first, then 10 (decreasing order).
        test_pix = np.arange(nfine_per_cov) + nfine_per_cov * 16
        test_values = np.zeros(nfine_per_cov)
        sparse_map.update_values_pix(test_pix, test_values)
        testing.assert_almost_equal(sparse_map.get_values_pix(test_pix), test_values)
        valid_pixels = sparse_map.valid_pixels
        testing.assert_equal(valid_pixels, test_pix)
        test_pix2 = np.arange(nfine_per_cov) + nfine_per_cov * 10
        test_values2 = np.zeros(nfine_per_cov) + 100
        sparse_map.update_values_pix(test_pix2, test_values2)
        testing.assert_almost_equal(sparse_map.get_values_pix(test_pix), test_values)
        testing.assert_almost_equal(sparse_map.get_values_pix(test_pix2), test_values2)
        valid_pixels = sparse_map.valid_pixels
        testing.assert_equal(np.sort(valid_pixels), np.sort(np.concatenate((test_pix, test_pix2))))

    def test_update_values_nonunique(self):
        """
        Test doing update_values with non-unique pixels.
        """
        nside_coverage = 32
        nside_map = 64
        dtype = np.float64
        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
        # Pixel 0 appears twice; a plain (replace) update must reject this.
        pixels = np.array([0, 1, 5, 10, 0])
        self.assertRaises(ValueError, sparse_map.update_values_pix, pixels, 0.0)
        self.assertRaises(ValueError, sparse_map.__setitem__, pixels, 0.0)

    def test_update_values_or(self):
        """
        Test doing update_values with or operation.
        """
        nside_coverage = 32
        nside_map = 64
        dtype = np.int32
        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype, sentinel=0)
        # Check with new unique pixels
        pixels = np.arange(4)
        values = np.array([2**0, 2**1, 2**2, 2**4], dtype=dtype)
        sparse_map.update_values_pix(pixels, values, operation='or')
        testing.assert_array_equal(sparse_map[pixels], values)
        # Check with pre-existing unique pixels
        values2 = np.array([2**1, 2**2, 2**3, 2**4], dtype=dtype)
        sparse_map.update_values_pix(pixels, values2, operation='or')
        testing.assert_array_equal(sparse_map[pixels],
                                   values | values2)
        # Check with new non-unique pixels
        pixels = np.array([100, 101, 102, 100])
        values = np.array([2**0, 2**1, 2**2, 2**4], dtype=dtype)
        sparse_map.update_values_pix(pixels, values, operation='or')
        testing.assert_array_equal(sparse_map[pixels],
                                   np.array([2**0 | 2**4, 2**1, 2**2, 2**0 | 2**4]))
        # Check with pre-existing non-unique pixels
        values = np.array([2**1, 2**2, 2**3, 2**5], dtype=dtype)
        sparse_map.update_values_pix(pixels, values, operation='or')
        testing.assert_array_equal(sparse_map[pixels],
                                   np.array([2**0 | 2**4 | 2**1 | 2**5,
                                             2**1 | 2**2,
                                             2**2 | 2**3,
                                             2**0 | 2**4 | 2**1 | 2**5]))

    def test_update_values_and(self):
        """
        Test doing update_values with and operation.
        """
        nside_coverage = 32
        nside_map = 64
        dtype = np.int32
        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype, sentinel=0)
        # Check with new unique pixels
        pixels = np.arange(4)
        values = np.array([2**0, 2**1, 2**2, 2**4], dtype=dtype)
        sparse_map.update_values_pix(pixels, values, operation='and')
        testing.assert_array_equal(sparse_map[pixels], values*0)
        # Check with pre-existing unique pixels
        sparse_map[pixels] = values
        sparse_map.update_values_pix(pixels, values, operation='and')
        testing.assert_array_equal(sparse_map[pixels], values)
        # Check with new non-unique pixels
        pixels = np.array([100, 101, 102, 100])
        values = np.array([2**0, 2**1, 2**2, 2**4], dtype=dtype)
        sparse_map.update_values_pix(pixels, values, operation='and')
        testing.assert_array_equal(sparse_map[pixels], values*0)
        # Check with pre-existing non-unique pixels
        sparse_map[100] = 2**0 | 2**4
        sparse_map[101] = 2**1
        sparse_map[102] = 2**2
        sparse_map.update_values_pix(pixels, values, operation='and')
        # The first and last will be 0 because we get anded sequentially.
        testing.assert_array_equal(sparse_map[pixels],
                                   [0, 2**1, 2**2, 0])

    def test_update_values_pos(self):
        """
        Test doing update_values with positions (unique and non-unique).
        """
        nside_coverage = 32
        nside_map = 64
        dtype = np.float64
        sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
        pixels = np.array([0, 1, 5, 10, 20])
        ra, dec = hp.pix2ang(nside_map, pixels, lonlat=True, nest=True)
        sparse_map.update_values_pos(ra, dec, 0.0)
        testing.assert_array_almost_equal(sparse_map[pixels], 0.0)
        # Test non-unique raise
        pixels = np.array([0, 1, 5, 10, 0])
        ra, dec = hp.pix2ang(nside_map, pixels, lonlat=True, nest=True)
        self.assertRaises(ValueError, sparse_map.update_values_pos, ra, dec, 0.0)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 37.020619
| 102
| 0.636452
| 980
| 7,182
| 4.381633
| 0.114286
| 0.09851
| 0.052399
| 0.073358
| 0.867024
| 0.843503
| 0.797857
| 0.760363
| 0.75361
| 0.721705
| 0
| 0.044611
| 0.257171
| 7,182
| 193
| 103
| 37.212435
| 0.760262
| 0.092036
| 0
| 0.657895
| 0
| 0
| 0.004394
| 0
| 0
| 0
| 0
| 0
| 0.192982
| 1
| 0.052632
| false
| 0
| 0.052632
| 0
| 0.114035
| 0.008772
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b94c82de5ac9e88c35092da190b0c76ffe07770c
| 55,431
|
py
|
Python
|
bokeh/core/tests/test_properties.py
|
chalmerlowe/bokeh
|
f79eef5fc64bc703c37165f9ed2e052492d74480
|
[
"BSD-3-Clause"
] | 1
|
2021-04-03T13:05:55.000Z
|
2021-04-03T13:05:55.000Z
|
bokeh/core/tests/test_properties.py
|
chalmerlowe/bokeh
|
f79eef5fc64bc703c37165f9ed2e052492d74480
|
[
"BSD-3-Clause"
] | null | null | null |
bokeh/core/tests/test_properties.py
|
chalmerlowe/bokeh
|
f79eef5fc64bc703c37165f9ed2e052492d74480
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import
import datetime
import unittest
import numpy as np
from copy import copy
from bokeh.core.properties import (
HasProps, NumberSpec, ColorSpec, Bool, Int, Float, Complex, String,
Regex, List, Dict, Tuple, Array, Instance, Any, Interval, Either,
Enum, Color, Align, DashPattern, Size, Percent, Angle, AngleSpec,
DistanceSpec, Override, Include, MinMaxBounds, Responsive, TitleProp)
from bokeh.models import Plot
from bokeh.models.annotations import Title
class Basictest(unittest.TestCase):
    """Core behavior tests for the HasProps property system."""

    def test_simple_class(self):
        """Defaults, properties(), and properties_with_values round-trip."""
        class Foo(HasProps):
            x = Int(12)
            y = String("hello")
            z = Array(Int, np.array([1, 2, 3]))
            s = String(None)
        f = Foo()
        self.assertEqual(f.x, 12)
        self.assertEqual(f.y, "hello")
        self.assert_(np.array_equal(np.array([1, 2, 3]), f.z))
        self.assertEqual(f.s, None)
        self.assertEqual(set(["x", "y", "z", "s"]), f.properties())
        with_defaults = f.properties_with_values(include_defaults=True)
        del with_defaults['z'] # can't compare equality on the np array
        self.assertDictEqual(dict(x=12, y="hello", s=None), with_defaults)
        without_defaults = f.properties_with_values(include_defaults=False)
        # the Array is in here because it's mutable
        self.assertTrue('z' in without_defaults)
        del without_defaults['z']
        self.assertDictEqual(dict(), without_defaults)
        f.x = 18
        self.assertEqual(f.x, 18)
        f.y = "bar"
        self.assertEqual(f.y, "bar")
        without_defaults = f.properties_with_values(include_defaults=False)
        del without_defaults['z']
        self.assertDictEqual(dict(x=18, y="bar"), without_defaults)

    def test_enum(self):
        """Enum accepts declared values and rejects others with ValueError."""
        class Foo(HasProps):
            x = Enum("blue", "red", "green") # the first item is the default
            y = Enum("small", "medium", "large", default="large")
        f = Foo()
        self.assertEqual(f.x, "blue")
        self.assertEqual(f.y, "large")
        f.x = "red"
        self.assertEqual(f.x, "red")
        with self.assertRaises(ValueError):
            f.x = "yellow"
        f.y = "small"
        self.assertEqual(f.y, "small")
        with self.assertRaises(ValueError):
            f.y = "yellow"

    def test_inheritance(self):
        """Subclasses see both inherited and own properties."""
        class Base(HasProps):
            x = Int(12)
            y = String("hello")
        class Child(Base):
            z = Float(3.14)
        c = Child()
        self.assertEqual(frozenset(['x', 'y', 'z']), frozenset(c.properties()))
        self.assertEqual(c.y, "hello")

    def test_set(self):
        """set(**kwargs) updates multiple properties, validating each."""
        class Foo(HasProps):
            x = Int(12)
            y = Enum("red", "blue", "green")
            z = String("blah")
        f = Foo()
        self.assertEqual(f.x, 12)
        self.assertEqual(f.y, "red")
        self.assertEqual(f.z, "blah")
        f.set(**dict(x=20, y="green", z="hello"))
        self.assertEqual(f.x, 20)
        self.assertEqual(f.y, "green")
        self.assertEqual(f.z, "hello")
        with self.assertRaises(ValueError):
            f.set(y="orange")

    def test_no_parens(self):
        """A bare property class (no parens) is auto-instantiated."""
        class Foo(HasProps):
            x = Int
            y = Int()
        f = Foo()
        self.assertEqual(f.x, f.y)
        f.x = 13
        self.assertEqual(f.x, 13)

    def test_accurate_properties_sets(self):
        """properties()/with_refs/containers are accurate across bases and mixins."""
        class Base(HasProps):
            num = Int(12)
            container = List(String)
            child = Instance(HasProps)
        class Mixin(HasProps):
            mixin_num = Int(12)
            mixin_container = List(String)
            mixin_child = Instance(HasProps)
        class Sub(Base, Mixin):
            sub_num = Int(12)
            sub_container = List(String)
            sub_child = Instance(HasProps)
        b = Base()
        self.assertEqual(set(["child"]),
                         b.properties_with_refs())
        self.assertEqual(set(["container"]),
                         b.properties_containers())
        self.assertEqual(set(["num", "container", "child"]),
                         b.properties())
        self.assertEqual(set(["num", "container", "child"]),
                         b.properties(with_bases=True))
        self.assertEqual(set(["num", "container", "child"]),
                         b.properties(with_bases=False))
        m = Mixin()
        self.assertEqual(set(["mixin_child"]),
                         m.properties_with_refs())
        self.assertEqual(set(["mixin_container"]),
                         m.properties_containers())
        self.assertEqual(set(["mixin_num", "mixin_container", "mixin_child"]),
                         m.properties())
        self.assertEqual(set(["mixin_num", "mixin_container", "mixin_child"]),
                         m.properties(with_bases=True))
        self.assertEqual(set(["mixin_num", "mixin_container", "mixin_child"]),
                         m.properties(with_bases=False))
        s = Sub()
        self.assertEqual(set(["child", "sub_child", "mixin_child"]),
                         s.properties_with_refs())
        self.assertEqual(set(["container", "sub_container", "mixin_container"]),
                         s.properties_containers())
        self.assertEqual(set(["num", "container", "child",
                              "mixin_num", "mixin_container", "mixin_child",
                              "sub_num", "sub_container", "sub_child"]),
                         s.properties())
        self.assertEqual(set(["num", "container", "child",
                              "mixin_num", "mixin_container", "mixin_child",
                              "sub_num", "sub_container", "sub_child"]),
                         s.properties(with_bases=True))
        self.assertEqual(set(["sub_num", "sub_container", "sub_child"]),
                         s.properties(with_bases=False))
        # verify caching
        self.assertIs(s.properties_with_refs(), s.properties_with_refs())
        self.assertIs(s.properties_containers(), s.properties_containers())
        self.assertIs(s.properties(), s.properties())
        self.assertIs(s.properties(with_bases=True), s.properties(with_bases=True))
        # this one isn't cached because we store it as a list __properties__ and wrap it
        # in a new set every time
        #self.assertIs(s.properties(with_bases=False), s.properties(with_bases=False))

    def test_accurate_dataspecs(self):
        """dataspecs()/dataspecs_with_props include only DataSpec properties."""
        class Base(HasProps):
            num = NumberSpec(12)
            not_a_dataspec = Float(10)
        class Mixin(HasProps):
            mixin_num = NumberSpec(14)
        class Sub(Base, Mixin):
            sub_num = NumberSpec(16)
        base = Base()
        mixin = Mixin()
        sub = Sub()
        self.assertEqual(set(["num"]), base.dataspecs())
        self.assertEqual(set(["mixin_num"]), mixin.dataspecs())
        self.assertEqual(set(["num", "mixin_num", "sub_num"]), sub.dataspecs())
        self.assertDictEqual(dict(num=base.lookup("num")), base.dataspecs_with_props())
        self.assertDictEqual(dict(mixin_num=mixin.lookup("mixin_num")), mixin.dataspecs_with_props())
        self.assertDictEqual(dict(num=sub.lookup("num"),
                                  mixin_num=sub.lookup("mixin_num"),
                                  sub_num=sub.lookup("sub_num")),
                             sub.dataspecs_with_props())

    def test_not_serialized(self):
        """serialized=False keeps a property out of properties_with_values."""
        class NotSerialized(HasProps):
            x = Int(12, serialized=False)
            y = String("hello")
        o = NotSerialized()
        self.assertEqual(o.x, 12)
        self.assertEqual(o.y, 'hello')
        # non-serialized props are still in the list of props
        self.assertTrue('x' in o.properties())
        self.assertTrue('y' in o.properties())
        # but they aren't in the dict of props with values, since their
        # values are not important (already included in other values,
        # as with the _units properties)
        self.assertTrue('x' not in o.properties_with_values(include_defaults=True))
        self.assertTrue('y' in o.properties_with_values(include_defaults=True))
        self.assertTrue('x' not in o.properties_with_values(include_defaults=False))
        self.assertTrue('y' not in o.properties_with_values(include_defaults=False))
        o.x = 42
        o.y = 'world'
        self.assertTrue('x' not in o.properties_with_values(include_defaults=True))
        self.assertTrue('y' in o.properties_with_values(include_defaults=True))
        self.assertTrue('x' not in o.properties_with_values(include_defaults=False))
        self.assertTrue('y' in o.properties_with_values(include_defaults=False))

    def test_include_defaults(self):
        """include_defaults toggles default-valued props in the values dict."""
        class IncludeDefaultsTest(HasProps):
            x = Int(12)
            y = String("hello")
        o = IncludeDefaultsTest()
        self.assertEqual(o.x, 12)
        self.assertEqual(o.y, 'hello')
        self.assertTrue('x' in o.properties_with_values(include_defaults=True))
        self.assertTrue('y' in o.properties_with_values(include_defaults=True))
        self.assertTrue('x' not in o.properties_with_values(include_defaults=False))
        self.assertTrue('y' not in o.properties_with_values(include_defaults=False))
        o.x = 42
        o.y = 'world'
        self.assertTrue('x' in o.properties_with_values(include_defaults=True))
        self.assertTrue('y' in o.properties_with_values(include_defaults=True))
        self.assertTrue('x' in o.properties_with_values(include_defaults=False))
        self.assertTrue('y' in o.properties_with_values(include_defaults=False))

    def test_include_defaults_with_kwargs(self):
        """Constructor kwargs count as explicitly-set values."""
        class IncludeDefaultsKwargsTest(HasProps):
            x = Int(12)
            y = String("hello")
        o = IncludeDefaultsKwargsTest(x=14, y="world")
        self.assertEqual(o.x, 14)
        self.assertEqual(o.y, 'world')
        self.assertTrue('x' in o.properties_with_values(include_defaults=True))
        self.assertTrue('y' in o.properties_with_values(include_defaults=True))
        self.assertTrue('x' in o.properties_with_values(include_defaults=False))
        self.assertTrue('y' in o.properties_with_values(include_defaults=False))

    def test_include_defaults_set_to_same(self):
        """Setting a prop to its default value is treated as a no-op."""
        class IncludeDefaultsSetToSameTest(HasProps):
            x = Int(12)
            y = String("hello")
        o = IncludeDefaultsSetToSameTest()
        self.assertTrue('x' in o.properties_with_values(include_defaults=True))
        self.assertTrue('y' in o.properties_with_values(include_defaults=True))
        self.assertTrue('x' not in o.properties_with_values(include_defaults=False))
        self.assertTrue('y' not in o.properties_with_values(include_defaults=False))
        # this should no-op
        o.x = 12
        o.y = "hello"
        self.assertTrue('x' in o.properties_with_values(include_defaults=True))
        self.assertTrue('y' in o.properties_with_values(include_defaults=True))
        self.assertTrue('x' not in o.properties_with_values(include_defaults=False))
        self.assertTrue('y' not in o.properties_with_values(include_defaults=False))

    def test_override_defaults(self):
        """Override(default=...) replaces an inherited default, incl. callables."""
        class FooBase(HasProps):
            x = Int(12)
        class FooSub(FooBase):
            x = Override(default=14)
        def func_default():
            return 16
        class FooSubSub(FooBase):
            x = Override(default=func_default)
        f_base = FooBase()
        f_sub = FooSub()
        f_sub_sub = FooSubSub()
        self.assertEqual(f_base.x, 12)
        self.assertEqual(f_sub.x, 14)
        self.assertEqual(f_sub_sub.x, 16)
        self.assertEqual(12, f_base.properties_with_values(include_defaults=True)['x'])
        self.assertEqual(14, f_sub.properties_with_values(include_defaults=True)['x'])
        self.assertEqual(16, f_sub_sub.properties_with_values(include_defaults=True)['x'])
        self.assertFalse('x' in f_base.properties_with_values(include_defaults=False))
        self.assertFalse('x' in f_sub.properties_with_values(include_defaults=False))
        self.assertFalse('x' in f_sub_sub.properties_with_values(include_defaults=False))

    def test_include_delegate(self):
        """Include pulls a delegate's props in, with or without a prefix."""
        class IsDelegate(HasProps):
            x = Int(12)
            y = String("hello")
        class IncludesDelegateWithPrefix(HasProps):
            z = Include(IsDelegate, use_prefix=True)
            z_y = Int(57) # override the Include
        class IncludesDelegateWithoutPrefix(HasProps):
            z = Include(IsDelegate, use_prefix=False)
            y = Int(42) # override the Include
        class IncludesDelegateWithoutPrefixUsingOverride(HasProps):
            z = Include(IsDelegate, use_prefix=False)
            y = Override(default="world") # override the Include changing just the default
        o = IncludesDelegateWithoutPrefix()
        self.assertEqual(o.x, 12)
        self.assertEqual(o.y, 42)
        self.assertFalse(hasattr(o, 'z'))
        self.assertTrue('x' in o.properties_with_values(include_defaults=True))
        self.assertTrue('y' in o.properties_with_values(include_defaults=True))
        self.assertTrue('x' not in o.properties_with_values(include_defaults=False))
        self.assertTrue('y' not in o.properties_with_values(include_defaults=False))
        o = IncludesDelegateWithoutPrefixUsingOverride()
        self.assertEqual(o.x, 12)
        self.assertEqual(o.y, 'world')
        self.assertFalse(hasattr(o, 'z'))
        self.assertTrue('x' in o.properties_with_values(include_defaults=True))
        self.assertTrue('y' in o.properties_with_values(include_defaults=True))
        self.assertTrue('x' not in o.properties_with_values(include_defaults=False))
        self.assertTrue('y' not in o.properties_with_values(include_defaults=False))
        o2 = IncludesDelegateWithPrefix()
        self.assertEqual(o2.z_x, 12)
        self.assertEqual(o2.z_y, 57)
        self.assertFalse(hasattr(o2, 'z'))
        self.assertFalse(hasattr(o2, 'x'))
        self.assertFalse(hasattr(o2, 'y'))
        self.assertFalse('z' in o2.properties_with_values(include_defaults=True))
        self.assertFalse('x' in o2.properties_with_values(include_defaults=True))
        self.assertFalse('y' in o2.properties_with_values(include_defaults=True))
        self.assertTrue('z_x' in o2.properties_with_values(include_defaults=True))
        self.assertTrue('z_y' in o2.properties_with_values(include_defaults=True))
        self.assertTrue('z_x' not in o2.properties_with_values(include_defaults=False))
        self.assertTrue('z_y' not in o2.properties_with_values(include_defaults=False))

    # def test_kwargs_init(self):
    #     class Foo(HasProps):
    #         x = String
    #         y = Int
    #         z = Float
    #     f = Foo(x = "hello", y = 14)
    #     self.assertEqual(f.x, "hello")
    #     self.assertEqual(f.y, 14)
    #     with self.assertRaises(TypeError):
    #         # This should raise a TypeError: object.__init__() takes no parameters
    #         g = Foo(z = 3.14, q = "blah")
class TestNumberSpec(unittest.TestCase):
    """Tests for NumberSpec field/value modes and JSON round-tripping."""

    def test_field(self):
        """A string value is treated as a field name."""
        class Foo(HasProps):
            x = NumberSpec("xfield")
        f = Foo()
        self.assertEqual(f.x, "xfield")
        self.assertDictEqual(Foo.__dict__["x"].serializable_value(f), {"field": "xfield"})
        f.x = "my_x"
        self.assertEqual(f.x, "my_x")
        self.assertDictEqual(Foo.__dict__["x"].serializable_value(f), {"field": "my_x"})

    def test_value(self):
        """A numeric value is serialized as {'value': ...}."""
        class Foo(HasProps):
            x = NumberSpec("xfield")
        f = Foo()
        self.assertEqual(f.x, "xfield")
        f.x = 12
        self.assertEqual(f.x, 12)
        self.assertDictEqual(Foo.__dict__["x"].serializable_value(f), {"value": 12})
        f.x = 15
        self.assertEqual(f.x, 15)
        self.assertDictEqual(Foo.__dict__["x"].serializable_value(f), {"value": 15})
        f.x = dict(value=32)
        self.assertDictEqual(Foo.__dict__["x"].serializable_value(f), {"value": 32})
        f.x = None
        self.assertIs(Foo.__dict__["x"].serializable_value(f), None)

    def test_default(self):
        """A numeric default serializes as a value until reassigned."""
        class Foo(HasProps):
            y = NumberSpec(default=12)
        f = Foo()
        self.assertEqual(f.y, 12)
        self.assertDictEqual(Foo.__dict__["y"].serializable_value(f), {"value": 12})
        f.y = "y1"
        self.assertEqual(f.y, "y1")
        # Once we set a concrete value, the default is ignored, because it is unused
        f.y = 32
        self.assertEqual(f.y, 32)
        self.assertDictEqual(Foo.__dict__["y"].serializable_value(f), {"value": 32})

    def test_multiple_instances(self):
        """Instances hold independent spec values."""
        class Foo(HasProps):
            x = NumberSpec("xfield")
        a = Foo()
        b = Foo()
        a.x = 13
        b.x = 14
        self.assertEqual(a.x, 13)
        self.assertEqual(b.x, 14)
        self.assertDictEqual(Foo.__dict__["x"].serializable_value(a), {"value": 13})
        self.assertDictEqual(Foo.__dict__["x"].serializable_value(b), {"value": 14})
        b.x = {"field": "x3"}
        self.assertDictEqual(Foo.__dict__["x"].serializable_value(a), {"value": 13})
        self.assertDictEqual(Foo.__dict__["x"].serializable_value(b), {"field": "x3"})

    def test_autocreate_no_parens(self):
        """A bare NumberSpec class is auto-instantiated with default None."""
        class Foo(HasProps):
            x = NumberSpec
        a = Foo()
        self.assertIs(a.x, None)
        a.x = 14
        self.assertEqual(a.x, 14)

    def test_set_from_json_keeps_mode(self):
        """set_from_json preserves dict-ness; plain assignment overwrites it."""
        class Foo(HasProps):
            x = NumberSpec(default=None)
        a = Foo()
        self.assertIs(a.x, None)
        # set as a value
        a.x = 14
        self.assertEqual(a.x, 14)
        # set_from_json keeps the previous dict-ness or lack thereof
        a.set_from_json('x', dict(value=16))
        self.assertEqual(a.x, 16)
        # but regular assignment overwrites the previous dict-ness
        a.x = dict(value=17)
        self.assertDictEqual(a.x, dict(value=17))
        # set as a field
        a.x = "bar"
        self.assertEqual(a.x, "bar")
        # set_from_json keeps the previous dict-ness or lack thereof
        a.set_from_json('x', dict(field="foo"))
        self.assertEqual(a.x, "foo")
        # but regular assignment overwrites the previous dict-ness
        a.x = dict(field="baz")
        self.assertDictEqual(a.x, dict(field="baz"))
class TestAngleSpec(unittest.TestCase):
    """Tests for AngleSpec values and the companion x_units property."""

    def test_default_none(self):
        """Default None value; units default to 'rad'."""
        class Foo(HasProps):
            x = AngleSpec(None)
        a = Foo()
        self.assertIs(a.x, None)
        self.assertEqual(a.x_units, 'rad')
        a.x = 14
        self.assertEqual(a.x, 14)
        self.assertEqual(a.x_units, 'rad')

    def test_autocreate_no_parens(self):
        """A bare AngleSpec class is auto-instantiated."""
        class Foo(HasProps):
            x = AngleSpec
        a = Foo()
        self.assertIs(a.x, None)
        self.assertEqual(a.x_units, 'rad')
        a.x = 14
        self.assertEqual(a.x, 14)
        self.assertEqual(a.x_units, 'rad')

    def test_default_value(self):
        """Numeric default is returned with default units."""
        class Foo(HasProps):
            x = AngleSpec(default=14)
        a = Foo()
        self.assertEqual(a.x, 14)
        self.assertEqual(a.x_units, 'rad')

    def test_setting_dict_sets_units(self):
        """Assigning a dict with 'units' moves units into x_units."""
        class Foo(HasProps):
            x = AngleSpec(default=14)
        a = Foo()
        self.assertEqual(a.x, 14)
        self.assertEqual(a.x_units, 'rad')
        a.x = { 'value' : 180, 'units' : 'deg' }
        self.assertDictEqual(a.x, { 'value' : 180 })
        self.assertEqual(a.x_units, 'deg')

    def test_setting_json_sets_units_keeps_dictness(self):
        """set_from_json with units updates x_units, result stays scalar."""
        class Foo(HasProps):
            x = AngleSpec(default=14)
        a = Foo()
        self.assertEqual(a.x, 14)
        self.assertEqual(a.x_units, 'rad')
        a.set_from_json('x', { 'value' : 180, 'units' : 'deg' })
        self.assertEqual(a.x, 180)
        self.assertEqual(a.x_units, 'deg')

    def test_setting_dict_does_not_modify_original_dict(self):
        """The caller's dict must not be mutated by assignment."""
        class Foo(HasProps):
            x = AngleSpec(default=14)
        a = Foo()
        self.assertEqual(a.x, 14)
        self.assertEqual(a.x_units, 'rad')
        new_value = { 'value' : 180, 'units' : 'deg' }
        new_value_copy = copy(new_value)
        self.assertDictEqual(new_value_copy, new_value)
        a.x = new_value
        self.assertDictEqual(a.x, { 'value' : 180 })
        self.assertEqual(a.x_units, 'deg')
        self.assertDictEqual(new_value_copy, new_value)
class TestDistanceSpec(unittest.TestCase):
    """DistanceSpec should behave like a units spec whose default units are 'data'."""

    def test_default_none(self):
        """An explicit None default reads back as None, with 'data' units."""
        class Sample(HasProps):
            x = DistanceSpec(None)
        sample = Sample()
        self.assertIs(sample.x, None)
        self.assertEqual(sample.x_units, 'data')
        sample.x = 14
        self.assertEqual(sample.x, 14)
        self.assertEqual(sample.x_units, 'data')

    def test_autocreate_no_parens(self):
        """A bare DistanceSpec class (not called) is auto-instantiated."""
        class Sample(HasProps):
            x = DistanceSpec
        sample = Sample()
        self.assertIs(sample.x, None)
        self.assertEqual(sample.x_units, 'data')
        sample.x = 14
        self.assertEqual(sample.x, 14)
        self.assertEqual(sample.x_units, 'data')

    def test_default_value(self):
        """A scalar default is returned unchanged with default units."""
        class Sample(HasProps):
            x = DistanceSpec(default=14)
        sample = Sample()
        self.assertEqual(sample.x, 14)
        self.assertEqual(sample.x_units, 'data')
class TestColorSpec(unittest.TestCase):
    """ColorSpec decides field-vs-value based on whether a string is a color."""

    def test_field(self):
        """A non-color string is treated as a field name when serialized."""
        class Foo(HasProps):
            col = ColorSpec("colorfield")
        desc = Foo.__dict__["col"]
        f = Foo()
        self.assertEqual(f.col, "colorfield")
        self.assertDictEqual(desc.serializable_value(f), {"field": "colorfield"})
        f.col = "myfield"
        self.assertEqual(f.col, "myfield")
        self.assertDictEqual(desc.serializable_value(f), {"field": "myfield"})

    def test_field_default(self):
        """A color given via default= serializes as a value, not a field."""
        class Foo(HasProps):
            col = ColorSpec(default="red")
        desc = Foo.__dict__["col"]
        f = Foo()
        self.assertEqual(f.col, "red")
        self.assertDictEqual(desc.serializable_value(f), {"value": "red"})
        f.col = "myfield"
        self.assertEqual(f.col, "myfield")
        self.assertDictEqual(desc.serializable_value(f), {"field": "myfield"})

    def test_default_tuple(self):
        """An RGB tuple default serializes to a CSS rgb() string value."""
        class Foo(HasProps):
            col = ColorSpec(default=(128, 255, 124))
        desc = Foo.__dict__["col"]
        f = Foo()
        self.assertEqual(f.col, (128, 255, 124))
        self.assertDictEqual(desc.serializable_value(f), {"value": "rgb(128, 255, 124)"})

    def test_fixed_value(self):
        """A recognized color name passed positionally becomes a fixed value."""
        class Foo(HasProps):
            col = ColorSpec("gray")
        desc = Foo.__dict__["col"]
        f = Foo()
        self.assertEqual(f.col, "gray")
        self.assertDictEqual(desc.serializable_value(f), {"value": "gray"})

    def test_named_value(self):
        """Assigning a CSS color name serializes as a value."""
        class Foo(HasProps):
            col = ColorSpec("colorfield")
        desc = Foo.__dict__["col"]
        f = Foo()
        f.col = "red"
        self.assertEqual(f.col, "red")
        self.assertDictEqual(desc.serializable_value(f), {"value": "red"})
        f.col = "forestgreen"
        self.assertEqual(f.col, "forestgreen")
        self.assertDictEqual(desc.serializable_value(f), {"value": "forestgreen"})

    def test_case_insensitive_named_value(self):
        """Color-name detection ignores case but preserves the original spelling."""
        class Foo(HasProps):
            col = ColorSpec("colorfield")
        desc = Foo.__dict__["col"]
        f = Foo()
        f.col = "RED"
        self.assertEqual(f.col, "RED")
        self.assertDictEqual(desc.serializable_value(f), {"value": "RED"})
        f.col = "ForestGreen"
        self.assertEqual(f.col, "ForestGreen")
        self.assertDictEqual(desc.serializable_value(f), {"value": "ForestGreen"})

    def test_named_value_set_none(self):
        """Setting None yields an explicit null color value."""
        class Foo(HasProps):
            col = ColorSpec("colorfield")
        desc = Foo.__dict__["col"]
        f = Foo()
        f.col = None
        self.assertDictEqual(desc.serializable_value(f), {"value": None})

    def test_named_value_unset(self):
        """With no assignment, the spec serializes its field default."""
        class Foo(HasProps):
            col = ColorSpec("colorfield")
        desc = Foo.__dict__["col"]
        f = Foo()
        self.assertDictEqual(desc.serializable_value(f), {"field": "colorfield"})

    def test_named_color_overriding_default(self):
        """A later non-color assignment flips back from value to field mode."""
        class Foo(HasProps):
            col = ColorSpec("colorfield")
        desc = Foo.__dict__["col"]
        f = Foo()
        f.col = "forestgreen"
        self.assertEqual(f.col, "forestgreen")
        self.assertDictEqual(desc.serializable_value(f), {"value": "forestgreen"})
        f.col = "myfield"
        self.assertEqual(f.col, "myfield")
        self.assertDictEqual(desc.serializable_value(f), {"field": "myfield"})

    def test_hex_value(self):
        """A #RRGGBB hex string is recognized as a color value."""
        class Foo(HasProps):
            col = ColorSpec("colorfield")
        desc = Foo.__dict__["col"]
        f = Foo()
        f.col = "#FF004A"
        self.assertEqual(f.col, "#FF004A")
        self.assertDictEqual(desc.serializable_value(f), {"value": "#FF004A"})
        f.col = "myfield"
        self.assertEqual(f.col, "myfield")
        self.assertDictEqual(desc.serializable_value(f), {"field": "myfield"})

    def test_tuple_value(self):
        """3-tuples serialize to rgb(); 4-tuples (with alpha) to rgba()."""
        class Foo(HasProps):
            col = ColorSpec("colorfield")
        desc = Foo.__dict__["col"]
        f = Foo()
        f.col = (128, 200, 255)
        self.assertEqual(f.col, (128, 200, 255))
        self.assertDictEqual(desc.serializable_value(f), {"value": "rgb(128, 200, 255)"})
        f.col = "myfield"
        self.assertEqual(f.col, "myfield")
        self.assertDictEqual(desc.serializable_value(f), {"field": "myfield"})
        f.col = (100, 150, 200, 0.5)
        self.assertEqual(f.col, (100, 150, 200, 0.5))
        self.assertDictEqual(desc.serializable_value(f), {"value": "rgba(100, 150, 200, 0.5)"})

    def test_set_dict(self):
        """An explicit {"field": ...} dict is stored as given."""
        class Foo(HasProps):
            col = ColorSpec("colorfield")
        desc = Foo.__dict__["col"]
        f = Foo()
        f.col = {"field": "myfield"}
        self.assertDictEqual(f.col, {"field": "myfield"})
        f.col = "field2"
        self.assertEqual(f.col, "field2")
        self.assertDictEqual(desc.serializable_value(f), {"field": "field2"})
class TestDashPattern(unittest.TestCase):
    """DashPattern accepts named, string, and integer-sequence dash specs."""

    def test_named(self):
        """Named patterns map to their canonical on/off integer lists."""
        class Foo(HasProps):
            pat = DashPattern
        f = Foo()
        self.assertEqual(f.pat, [])
        f.pat = "solid"
        self.assertEqual(f.pat, [])
        f.pat = "dashed"
        self.assertEqual(f.pat, [6])
        f.pat = "dotted"
        self.assertEqual(f.pat, [2, 4])
        f.pat = "dotdash"
        self.assertEqual(f.pat, [2, 4, 6, 4])
        f.pat = "dashdot"
        self.assertEqual(f.pat, [6, 4, 2, 4])

    def test_string(self):
        """Whitespace-separated integer strings parse into int lists."""
        class Foo(HasProps):
            pat = DashPattern
        f = Foo()
        f.pat = ""
        self.assertEqual(f.pat, [])
        f.pat = "2"
        self.assertEqual(f.pat, [2])
        f.pat = "2 4"
        self.assertEqual(f.pat, [2, 4])
        f.pat = "2 4 6"
        self.assertEqual(f.pat, [2, 4, 6])
        with self.assertRaises(ValueError):
            f.pat = "abc 6"

    def test_list(self):
        """Integer sequences pass through unchanged; non-int elements are rejected."""
        class Foo(HasProps):
            pat = DashPattern
        f = Foo()
        f.pat = ()
        self.assertEqual(f.pat, ())
        f.pat = (2,)
        self.assertEqual(f.pat, (2,))
        f.pat = (2, 4)
        self.assertEqual(f.pat, (2, 4))
        f.pat = (2, 4, 6)
        self.assertEqual(f.pat, (2, 4, 6))
        with self.assertRaises(ValueError):
            f.pat = (2, 4.2)
        with self.assertRaises(ValueError):
            f.pat = (2, "a")

    def test_invalid(self):
        """Bare numbers and dicts are not valid dash patterns."""
        class Foo(HasProps):
            pat = DashPattern
        f = Foo()
        with self.assertRaises(ValueError):
            f.pat = 10
        with self.assertRaises(ValueError):
            f.pat = 10.1
        with self.assertRaises(ValueError):
            f.pat = {}
class Foo(HasProps):
    # Minimal HasProps subclass used as an instance fixture by the
    # is_valid() checks in TestProperties below.
    pass
class Bar(HasProps):
    # Second marker class, distinct from Foo, for Instance() type tests.
    pass
class Baz(HasProps):
    # Third marker class, also used to verify Instance(Foo) rejects others.
    pass
class TestProperties(unittest.TestCase):
    """Exhaustive is_valid() acceptance/rejection matrix for core property types.

    Each test probes the same battery of plain-Python values (None, bools,
    ints, floats, complex, str, tuple, list, dict, Foo instance) and then
    type-specific extras.  The numpy sections are skipped when numpy is not
    installed.  ``# TODO`` lines mark known-questionable acceptances (e.g.
    bool accepted by numeric properties) left for a future tightening.
    """

    def test_Any(self):
        """Any accepts every value."""
        prop = Any()
        self.assertTrue(prop.is_valid(None))
        self.assertTrue(prop.is_valid(False))
        self.assertTrue(prop.is_valid(True))
        self.assertTrue(prop.is_valid(0))
        self.assertTrue(prop.is_valid(1))
        self.assertTrue(prop.is_valid(0.0))
        self.assertTrue(prop.is_valid(1.0))
        self.assertTrue(prop.is_valid(1.0+1.0j))
        self.assertTrue(prop.is_valid(""))
        self.assertTrue(prop.is_valid(()))
        self.assertTrue(prop.is_valid([]))
        self.assertTrue(prop.is_valid({}))
        self.assertTrue(prop.is_valid(Foo()))

    def test_Bool(self):
        """Bool accepts only None, bool, and numpy bools."""
        prop = Bool()
        self.assertTrue(prop.is_valid(None))
        self.assertTrue(prop.is_valid(False))
        self.assertTrue(prop.is_valid(True))
        self.assertFalse(prop.is_valid(0))
        self.assertFalse(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        try:
            import numpy as np
            self.assertTrue(prop.is_valid(np.bool8(False)))
            self.assertTrue(prop.is_valid(np.bool8(True)))
            self.assertFalse(prop.is_valid(np.int8(0)))
            self.assertFalse(prop.is_valid(np.int8(1)))
            self.assertFalse(prop.is_valid(np.int16(0)))
            self.assertFalse(prop.is_valid(np.int16(1)))
            self.assertFalse(prop.is_valid(np.int32(0)))
            self.assertFalse(prop.is_valid(np.int32(1)))
            self.assertFalse(prop.is_valid(np.int64(0)))
            self.assertFalse(prop.is_valid(np.int64(1)))
            self.assertFalse(prop.is_valid(np.uint8(0)))
            self.assertFalse(prop.is_valid(np.uint8(1)))
            self.assertFalse(prop.is_valid(np.uint16(0)))
            self.assertFalse(prop.is_valid(np.uint16(1)))
            self.assertFalse(prop.is_valid(np.uint32(0)))
            self.assertFalse(prop.is_valid(np.uint32(1)))
            self.assertFalse(prop.is_valid(np.uint64(0)))
            self.assertFalse(prop.is_valid(np.uint64(1)))
            self.assertFalse(prop.is_valid(np.float16(0)))
            self.assertFalse(prop.is_valid(np.float16(1)))
            self.assertFalse(prop.is_valid(np.float32(0)))
            self.assertFalse(prop.is_valid(np.float32(1)))
            self.assertFalse(prop.is_valid(np.float64(0)))
            self.assertFalse(prop.is_valid(np.float64(1)))
            self.assertFalse(prop.is_valid(np.complex64(1.0+1.0j)))
            self.assertFalse(prop.is_valid(np.complex128(1.0+1.0j)))
            self.assertFalse(prop.is_valid(np.complex256(1.0+1.0j)))
        except ImportError:
            pass

    def test_Int(self):
        """Int accepts Python and numpy integers, rejects floats."""
        prop = Int()
        self.assertTrue(prop.is_valid(None))
        # TODO: self.assertFalse(prop.is_valid(False))
        # TODO: self.assertFalse(prop.is_valid(True))
        self.assertTrue(prop.is_valid(0))
        self.assertTrue(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        try:
            import numpy as np
            # TODO: self.assertFalse(prop.is_valid(np.bool8(False)))
            # TODO: self.assertFalse(prop.is_valid(np.bool8(True)))
            self.assertTrue(prop.is_valid(np.int8(0)))
            self.assertTrue(prop.is_valid(np.int8(1)))
            self.assertTrue(prop.is_valid(np.int16(0)))
            self.assertTrue(prop.is_valid(np.int16(1)))
            self.assertTrue(prop.is_valid(np.int32(0)))
            self.assertTrue(prop.is_valid(np.int32(1)))
            self.assertTrue(prop.is_valid(np.int64(0)))
            self.assertTrue(prop.is_valid(np.int64(1)))
            self.assertTrue(prop.is_valid(np.uint8(0)))
            self.assertTrue(prop.is_valid(np.uint8(1)))
            self.assertTrue(prop.is_valid(np.uint16(0)))
            self.assertTrue(prop.is_valid(np.uint16(1)))
            self.assertTrue(prop.is_valid(np.uint32(0)))
            self.assertTrue(prop.is_valid(np.uint32(1)))
            self.assertTrue(prop.is_valid(np.uint64(0)))
            self.assertTrue(prop.is_valid(np.uint64(1)))
            self.assertFalse(prop.is_valid(np.float16(0)))
            self.assertFalse(prop.is_valid(np.float16(1)))
            self.assertFalse(prop.is_valid(np.float32(0)))
            self.assertFalse(prop.is_valid(np.float32(1)))
            self.assertFalse(prop.is_valid(np.float64(0)))
            self.assertFalse(prop.is_valid(np.float64(1)))
            self.assertFalse(prop.is_valid(np.complex64(1.0+1.0j)))
            self.assertFalse(prop.is_valid(np.complex128(1.0+1.0j)))
            self.assertFalse(prop.is_valid(np.complex256(1.0+1.0j)))
        except ImportError:
            pass

    def test_Float(self):
        """Float accepts ints and floats (Python and numpy), rejects complex."""
        prop = Float()
        self.assertTrue(prop.is_valid(None))
        # TODO: self.assertFalse(prop.is_valid(False))
        # TODO: self.assertFalse(prop.is_valid(True))
        self.assertTrue(prop.is_valid(0))
        self.assertTrue(prop.is_valid(1))
        self.assertTrue(prop.is_valid(0.0))
        self.assertTrue(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        try:
            import numpy as np
            # TODO: self.assertFalse(prop.is_valid(np.bool8(False)))
            # TODO: self.assertFalse(prop.is_valid(np.bool8(True)))
            self.assertTrue(prop.is_valid(np.int8(0)))
            self.assertTrue(prop.is_valid(np.int8(1)))
            self.assertTrue(prop.is_valid(np.int16(0)))
            self.assertTrue(prop.is_valid(np.int16(1)))
            self.assertTrue(prop.is_valid(np.int32(0)))
            self.assertTrue(prop.is_valid(np.int32(1)))
            self.assertTrue(prop.is_valid(np.int64(0)))
            self.assertTrue(prop.is_valid(np.int64(1)))
            self.assertTrue(prop.is_valid(np.uint8(0)))
            self.assertTrue(prop.is_valid(np.uint8(1)))
            self.assertTrue(prop.is_valid(np.uint16(0)))
            self.assertTrue(prop.is_valid(np.uint16(1)))
            self.assertTrue(prop.is_valid(np.uint32(0)))
            self.assertTrue(prop.is_valid(np.uint32(1)))
            self.assertTrue(prop.is_valid(np.uint64(0)))
            self.assertTrue(prop.is_valid(np.uint64(1)))
            self.assertTrue(prop.is_valid(np.float16(0)))
            self.assertTrue(prop.is_valid(np.float16(1)))
            self.assertTrue(prop.is_valid(np.float32(0)))
            self.assertTrue(prop.is_valid(np.float32(1)))
            self.assertTrue(prop.is_valid(np.float64(0)))
            self.assertTrue(prop.is_valid(np.float64(1)))
            self.assertFalse(prop.is_valid(np.complex64(1.0+1.0j)))
            self.assertFalse(prop.is_valid(np.complex128(1.0+1.0j)))
            self.assertFalse(prop.is_valid(np.complex256(1.0+1.0j)))
        except ImportError:
            pass

    def test_Complex(self):
        """Complex accepts all numeric values, including complex."""
        prop = Complex()
        self.assertTrue(prop.is_valid(None))
        # TODO: self.assertFalse(prop.is_valid(False))
        # TODO: self.assertFalse(prop.is_valid(True))
        self.assertTrue(prop.is_valid(0))
        self.assertTrue(prop.is_valid(1))
        self.assertTrue(prop.is_valid(0.0))
        self.assertTrue(prop.is_valid(1.0))
        self.assertTrue(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        try:
            import numpy as np
            # TODO: self.assertFalse(prop.is_valid(np.bool8(False)))
            # TODO: self.assertFalse(prop.is_valid(np.bool8(True)))
            self.assertTrue(prop.is_valid(np.int8(0)))
            self.assertTrue(prop.is_valid(np.int8(1)))
            self.assertTrue(prop.is_valid(np.int16(0)))
            self.assertTrue(prop.is_valid(np.int16(1)))
            self.assertTrue(prop.is_valid(np.int32(0)))
            self.assertTrue(prop.is_valid(np.int32(1)))
            self.assertTrue(prop.is_valid(np.int64(0)))
            self.assertTrue(prop.is_valid(np.int64(1)))
            self.assertTrue(prop.is_valid(np.uint8(0)))
            self.assertTrue(prop.is_valid(np.uint8(1)))
            self.assertTrue(prop.is_valid(np.uint16(0)))
            self.assertTrue(prop.is_valid(np.uint16(1)))
            self.assertTrue(prop.is_valid(np.uint32(0)))
            self.assertTrue(prop.is_valid(np.uint32(1)))
            self.assertTrue(prop.is_valid(np.uint64(0)))
            self.assertTrue(prop.is_valid(np.uint64(1)))
            self.assertTrue(prop.is_valid(np.float16(0)))
            self.assertTrue(prop.is_valid(np.float16(1)))
            self.assertTrue(prop.is_valid(np.float32(0)))
            self.assertTrue(prop.is_valid(np.float32(1)))
            self.assertTrue(prop.is_valid(np.float64(0)))
            self.assertTrue(prop.is_valid(np.float64(1)))
            self.assertTrue(prop.is_valid(np.complex64(1.0+1.0j)))
            self.assertTrue(prop.is_valid(np.complex128(1.0+1.0j)))
            self.assertTrue(prop.is_valid(np.complex256(1.0+1.0j)))
        except ImportError:
            pass

    def test_String(self):
        """String accepts only None and str."""
        prop = String()
        self.assertTrue(prop.is_valid(None))
        self.assertFalse(prop.is_valid(False))
        self.assertFalse(prop.is_valid(True))
        self.assertFalse(prop.is_valid(0))
        self.assertFalse(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertTrue(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))

    def test_Regex(self):
        """Regex requires a pattern argument and accepts matching strings."""
        with self.assertRaises(TypeError):
            prop = Regex()
        prop = Regex("^x*$")
        self.assertTrue(prop.is_valid(None))
        self.assertFalse(prop.is_valid(False))
        self.assertFalse(prop.is_valid(True))
        self.assertFalse(prop.is_valid(0))
        self.assertFalse(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertTrue(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))

    def test_List(self):
        """List requires an item type and accepts only lists (not tuples)."""
        with self.assertRaises(TypeError):
            prop = List()
        prop = List(Int)
        self.assertTrue(prop.is_valid(None))
        self.assertFalse(prop.is_valid(False))
        self.assertFalse(prop.is_valid(True))
        self.assertFalse(prop.is_valid(0))
        self.assertFalse(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertTrue(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))

    def test_Dict(self):
        """Dict requires key and value types and accepts only dicts."""
        with self.assertRaises(TypeError):
            prop = Dict()
        prop = Dict(String, List(Int))
        self.assertTrue(prop.is_valid(None))
        self.assertFalse(prop.is_valid(False))
        self.assertFalse(prop.is_valid(True))
        self.assertFalse(prop.is_valid(0))
        self.assertFalse(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertTrue(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))

    def test_Tuple(self):
        """Tuple requires >= 2 element types and validates per-position."""
        with self.assertRaises(TypeError):
            prop = Tuple()
        with self.assertRaises(TypeError):
            prop = Tuple(Int)
        prop = Tuple(Int, String, List(Int))
        self.assertTrue(prop.is_valid(None))
        self.assertFalse(prop.is_valid(False))
        self.assertFalse(prop.is_valid(True))
        self.assertFalse(prop.is_valid(0))
        self.assertFalse(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        self.assertTrue(prop.is_valid((1, "", [1, 2, 3])))
        self.assertFalse(prop.is_valid((1.0, "", [1, 2, 3])))
        self.assertFalse(prop.is_valid((1, True, [1, 2, 3])))
        self.assertFalse(prop.is_valid((1, "", (1, 2, 3))))
        self.assertFalse(prop.is_valid((1, "", [1, 2, "xyz"])))

    def test_Instance(self):
        """Instance requires a class and accepts only instances of it."""
        with self.assertRaises(TypeError):
            prop = Instance()
        prop = Instance(Foo)
        self.assertTrue(prop.is_valid(None))
        self.assertFalse(prop.is_valid(False))
        self.assertFalse(prop.is_valid(True))
        self.assertFalse(prop.is_valid(0))
        self.assertFalse(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertTrue(prop.is_valid(Foo()))
        self.assertFalse(prop.is_valid(Bar()))
        self.assertFalse(prop.is_valid(Baz()))

    def test_Interval(self):
        """Interval checks both the element type and the [start, end] bounds."""
        with self.assertRaises(TypeError):
            prop = Interval()
        with self.assertRaises(ValueError):
            # bounds must themselves validate against the element type
            prop = Interval(Int, 0.0, 1.0)
        prop = Interval(Int, 0, 255)
        self.assertTrue(prop.is_valid(None))
        # TODO: self.assertFalse(prop.is_valid(False))
        # TODO: self.assertFalse(prop.is_valid(True))
        self.assertTrue(prop.is_valid(0))
        self.assertTrue(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        self.assertTrue(prop.is_valid(127))
        self.assertFalse(prop.is_valid(-1))
        self.assertFalse(prop.is_valid(256))
        prop = Interval(Float, 0.0, 1.0)
        self.assertTrue(prop.is_valid(None))
        # TODO: self.assertFalse(prop.is_valid(False))
        # TODO: self.assertFalse(prop.is_valid(True))
        self.assertTrue(prop.is_valid(0))
        self.assertTrue(prop.is_valid(1))
        self.assertTrue(prop.is_valid(0.0))
        self.assertTrue(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        self.assertTrue(prop.is_valid(0.5))
        self.assertFalse(prop.is_valid(-0.001))
        self.assertFalse(prop.is_valid( 1.001))

    def test_Either(self):
        """Either accepts a value valid under any of its alternatives."""
        with self.assertRaises(TypeError):
            prop = Either()
        prop = Either(Interval(Int, 0, 100), Regex("^x*$"), List(Int))
        self.assertTrue(prop.is_valid(None))
        # TODO: self.assertFalse(prop.is_valid(False))
        # TODO: self.assertFalse(prop.is_valid(True))
        self.assertTrue(prop.is_valid(0))
        self.assertTrue(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertTrue(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertTrue(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        self.assertTrue(prop.is_valid(100))
        self.assertFalse(prop.is_valid(-100))
        self.assertTrue(prop.is_valid("xxx"))
        self.assertFalse(prop.is_valid("yyy"))
        self.assertTrue(prop.is_valid([1, 2, 3]))
        self.assertFalse(prop.is_valid([1, 2, ""]))

    def test_Enum(self):
        """Enum matches exact (case-sensitive) members, from strings or an Enumeration."""
        with self.assertRaises(TypeError):
            prop = Enum()
        with self.assertRaises(TypeError):
            prop = Enum("red", "green", 1)
        with self.assertRaises(TypeError):
            prop = Enum("red", "green", "red")
        prop = Enum("red", "green", "blue")
        self.assertTrue(prop.is_valid(None))
        self.assertFalse(prop.is_valid(False))
        self.assertFalse(prop.is_valid(True))
        self.assertFalse(prop.is_valid(0))
        self.assertFalse(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        self.assertTrue(prop.is_valid("red"))
        self.assertTrue(prop.is_valid("green"))
        self.assertTrue(prop.is_valid("blue"))
        self.assertFalse(prop.is_valid("RED"))
        self.assertFalse(prop.is_valid("GREEN"))
        self.assertFalse(prop.is_valid("BLUE"))
        self.assertFalse(prop.is_valid(" red"))
        self.assertFalse(prop.is_valid(" green"))
        self.assertFalse(prop.is_valid(" blue"))
        from bokeh.core.enums import LineJoin
        prop = Enum(LineJoin)
        self.assertTrue(prop.is_valid(None))
        self.assertFalse(prop.is_valid(False))
        self.assertFalse(prop.is_valid(True))
        self.assertFalse(prop.is_valid(0))
        self.assertFalse(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        self.assertTrue(prop.is_valid("miter"))
        self.assertTrue(prop.is_valid("round"))
        self.assertTrue(prop.is_valid("bevel"))
        self.assertFalse(prop.is_valid("MITER"))
        self.assertFalse(prop.is_valid("ROUND"))
        self.assertFalse(prop.is_valid("BEVEL"))
        self.assertFalse(prop.is_valid(" miter"))
        self.assertFalse(prop.is_valid(" round"))
        self.assertFalse(prop.is_valid(" bevel"))
        # NamedColor is a case-insensitive enumeration, unlike LineJoin
        from bokeh.core.enums import NamedColor
        prop = Enum(NamedColor)
        self.assertTrue(prop.is_valid("red"))
        self.assertTrue(prop.is_valid("Red"))
        self.assertTrue(prop.is_valid("RED"))

    def test_Color(self):
        """Color accepts RGB(A) tuples, #hex strings, and CSS color names."""
        prop = Color()
        self.assertTrue(prop.is_valid(None))
        self.assertFalse(prop.is_valid(False))
        self.assertFalse(prop.is_valid(True))
        self.assertFalse(prop.is_valid(0))
        self.assertFalse(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        self.assertTrue(prop.is_valid((0, 127, 255)))
        self.assertFalse(prop.is_valid((0, -127, 255)))
        self.assertFalse(prop.is_valid((0, 127)))
        self.assertFalse(prop.is_valid((0, 127, 1.0)))
        # alpha must be a float in [0, 1], not a 0-255 channel
        self.assertFalse(prop.is_valid((0, 127, 255, 255)))
        self.assertTrue(prop.is_valid((0, 127, 255, 1.0)))
        self.assertTrue(prop.is_valid("#00aaff"))
        self.assertTrue(prop.is_valid("#00AAFF"))
        self.assertTrue(prop.is_valid("#00AaFf"))
        self.assertFalse(prop.is_valid("00aaff"))
        self.assertFalse(prop.is_valid("00AAFF"))
        self.assertFalse(prop.is_valid("00AaFf"))
        self.assertFalse(prop.is_valid("#00AaFg"))
        self.assertFalse(prop.is_valid("#00AaFff"))
        self.assertTrue(prop.is_valid("blue"))
        self.assertTrue(prop.is_valid("BLUE"))
        self.assertFalse(prop.is_valid("foobar"))

    def test_Align(self):
        """Placeholder: Align currently has no validation worth probing."""
        prop = Align() # TODO
        assert prop

    def test_DashPattern(self):
        """DashPattern validity mirrors the assignment rules tested above."""
        prop = DashPattern()
        self.assertTrue(prop.is_valid(None))
        self.assertFalse(prop.is_valid(False))
        self.assertFalse(prop.is_valid(True))
        self.assertFalse(prop.is_valid(0))
        self.assertFalse(prop.is_valid(1))
        self.assertFalse(prop.is_valid(0.0))
        self.assertFalse(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertTrue(prop.is_valid(""))
        self.assertTrue(prop.is_valid(()))
        self.assertTrue(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        self.assertTrue(prop.is_valid("solid"))
        self.assertTrue(prop.is_valid("dashed"))
        self.assertTrue(prop.is_valid("dotted"))
        self.assertTrue(prop.is_valid("dotdash"))
        self.assertTrue(prop.is_valid("dashdot"))
        self.assertFalse(prop.is_valid("DASHDOT"))
        self.assertTrue(prop.is_valid([1, 2, 3]))
        self.assertFalse(prop.is_valid([1, 2, 3.0]))
        self.assertTrue(prop.is_valid("1 2 3"))
        self.assertFalse(prop.is_valid("1 2 x"))

    def test_Size(self):
        """Size is a non-negative number."""
        prop = Size()
        self.assertTrue(prop.is_valid(None))
        # TODO: self.assertFalse(prop.is_valid(False))
        # TODO: self.assertFalse(prop.is_valid(True))
        self.assertTrue(prop.is_valid(0))
        self.assertTrue(prop.is_valid(1))
        self.assertTrue(prop.is_valid(0.0))
        self.assertTrue(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        self.assertTrue(prop.is_valid(100))
        self.assertTrue(prop.is_valid(100.1))
        self.assertFalse(prop.is_valid(-100))
        self.assertFalse(prop.is_valid(-0.001))

    def test_Percent(self):
        """Percent is a number constrained to [0, 1]."""
        prop = Percent()
        self.assertTrue(prop.is_valid(None))
        # TODO: self.assertFalse(prop.is_valid(False))
        # TODO: self.assertFalse(prop.is_valid(True))
        self.assertTrue(prop.is_valid(0))
        self.assertTrue(prop.is_valid(1))
        self.assertTrue(prop.is_valid(0.0))
        self.assertTrue(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))
        self.assertTrue(prop.is_valid(0.5))
        self.assertFalse(prop.is_valid(-0.001))
        self.assertFalse(prop.is_valid( 1.001))

    def test_Angle(self):
        """Angle is an unconstrained number."""
        prop = Angle()
        self.assertTrue(prop.is_valid(None))
        # TODO: self.assertFalse(prop.is_valid(False))
        # TODO: self.assertFalse(prop.is_valid(True))
        self.assertTrue(prop.is_valid(0))
        self.assertTrue(prop.is_valid(1))
        self.assertTrue(prop.is_valid(0.0))
        self.assertTrue(prop.is_valid(1.0))
        self.assertFalse(prop.is_valid(1.0+1.0j))
        self.assertFalse(prop.is_valid(""))
        self.assertFalse(prop.is_valid(()))
        self.assertFalse(prop.is_valid([]))
        self.assertFalse(prop.is_valid({}))
        self.assertFalse(prop.is_valid(Foo()))

    def test_MinMaxBounds_with_no_datetime(self):
        """Bounds accept 'auto', None, and ordered numeric (min, max) pairs."""
        prop = MinMaxBounds(accept_datetime=False)
        # Valid values
        self.assertTrue(prop.is_valid('auto'))
        self.assertTrue(prop.is_valid(None))
        self.assertTrue(prop.is_valid((12, 13)))
        self.assertTrue(prop.is_valid((-32, -13)))
        self.assertTrue(prop.is_valid((12.1, 13.1)))
        self.assertTrue(prop.is_valid((None, 13.1)))
        self.assertTrue(prop.is_valid((-22, None)))
        # Invalid values
        self.assertFalse(prop.is_valid('string'))
        self.assertFalse(prop.is_valid(12))
        self.assertFalse(prop.is_valid(('a', 'b')))
        self.assertFalse(prop.is_valid((13, 12)))
        self.assertFalse(prop.is_valid((13.1, 12.2)))
        self.assertFalse(prop.is_valid((datetime.date(2012, 10, 1), datetime.date(2012, 12, 2))))

    def test_MinMaxBounds_with_datetime(self):
        """accept_datetime=True allows date pairs but not mixed date/number."""
        prop = MinMaxBounds(accept_datetime=True)
        # Valid values
        self.assertTrue(prop.is_valid((datetime.date(2012, 10, 1), datetime.date(2012, 12, 2))))
        # Invalid values
        self.assertFalse(prop.is_valid((datetime.date(2012, 10, 1), 22)))
def test_HasProps_clone():
    """_clone() must reproduce every explicitly-set (non-default) property value."""
    original = Plot(plot_width=1000)
    expected = original.properties_with_values(include_defaults=False)
    duplicate = original._clone()
    actual = duplicate.properties_with_values(include_defaults=False)
    assert expected == actual
def test_responsive_transforms_true_into_width():
    """Responsive(True) is normalized to the 'width_ar' sizing mode."""
    class Holder(HasProps):
        responsive = Responsive
    holder = Holder(responsive=True)
    assert holder.responsive == 'width_ar'
def test_responsive_transforms_false_into_fixed():
    """Responsive(False) is normalized to the 'fixed' sizing mode."""
    class Holder(HasProps):
        responsive = Responsive
    holder = Holder(responsive=False)
    assert holder.responsive == 'fixed'
def test_titleprop_transforms_string_into_title_object():
    """Assigning a plain string to TitleProp wraps it in a Title model."""
    class Holder(HasProps):
        title = TitleProp
    holder = Holder(title="hello")
    assert isinstance(holder.title, Title)
    assert holder.title.text == "hello"
def test_titleprop_transforms_empty_string_into_None():
    """An empty title string is normalized to None rather than a Title."""
    class Holder(HasProps):
        title = TitleProp
    holder = Holder(title="")
    assert holder.title is None
| 37.277068
| 101
| 0.608125
| 7,065
| 55,431
| 4.615711
| 0.048125
| 0.086477
| 0.15854
| 0.186109
| 0.821098
| 0.787243
| 0.743545
| 0.68893
| 0.6567
| 0.620086
| 0
| 0.028103
| 0.247659
| 55,431
| 1,486
| 102
| 37.302153
| 0.753855
| 0.045137
| 0
| 0.581218
| 0
| 0
| 0.038267
| 0
| 0
| 0
| 0
| 0.000673
| 0.603215
| 1
| 0.060914
| false
| 0.005922
| 0.015228
| 0.000846
| 0.137902
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b957e2e88ac9c8e806eec7aa6e397e9ab7990cd8
| 6,283
|
py
|
Python
|
tutorial/5_visual_tools.py
|
brjathu/manim
|
997de562ed971de11138677c348901ceb2965466
|
[
"MIT"
] | null | null | null |
tutorial/5_visual_tools.py
|
brjathu/manim
|
997de562ed971de11138677c348901ceb2965466
|
[
"MIT"
] | null | null | null |
tutorial/5_visual_tools.py
|
brjathu/manim
|
997de562ed971de11138677c348901ceb2965466
|
[
"MIT"
] | null | null | null |
from big_ol_pile_of_manim_imports import *
class MoveBraces(Scene):
    """Animate a labeled brace sliding between two terms of the product rule."""
    def construct(self):
        # Formula split into indexable sub-strings so braces can target terms.
        text=TexMobject(
            "\\frac{d}{dx}f(x)g(x)=", #0
            "f(x)\\frac{d}{dx}g(x)", #1
            "+", #2
            "g(x)\\frac{d}{dx}f(x)" #3
        )
        self.play(Write(text))
        # One brace above each product-rule term.
        brace1 = Brace(text[1], UP, buff = SMALL_BUFF)
        brace2 = Brace(text[3], UP, buff = SMALL_BUFF)
        t1 = brace1.get_text("$g'f$")
        t2 = brace2.get_text("$f'g$")
        self.play(
            GrowFromCenter(brace1),
            FadeIn(t1),
        )
        self.wait()
        # ReplacementTransform moves the originals; nothing is left behind.
        self.play(
            ReplacementTransform(brace1,brace2),
            ReplacementTransform(t1,t2)
        )
        self.wait()
class MoveBracesCopy(Scene):
    """Variant of MoveBraces: transforming copies leaves the first brace in place."""
    def construct(self):
        text=TexMobject(
            "\\frac{d}{dx}f(x)g(x)=","f(x)\\frac{d}{dx}g(x)","+",
            "g(x)\\frac{d}{dx}f(x)"
        )
        self.play(Write(text))
        brace1 = Brace(text[1], UP, buff = SMALL_BUFF)
        brace2 = Brace(text[3], UP, buff = SMALL_BUFF)
        t1 = brace1.get_text("$g'f$")
        t2 = brace2.get_text("$f'g$")
        self.play(
            GrowFromCenter(brace1),
            FadeIn(t1),
        )
        self.wait()
        # .copy() keeps brace1/t1 on screen while their copies morph.
        self.play(
            ReplacementTransform(brace1.copy(),brace2),
            ReplacementTransform(t1.copy(),t2)
        )
        self.wait()
class MoveFrameBox(Scene):
    """Draw a surrounding rectangle and move it from one term to another."""
    def construct(self):
        text=TexMobject(
            "\\frac{d}{dx}f(x)g(x)=","f(x)\\frac{d}{dx}g(x)","+",
            "g(x)\\frac{d}{dx}f(x)"
        )
        self.play(Write(text))
        framebox1 = SurroundingRectangle(text[1], buff = .1)
        framebox2 = SurroundingRectangle(text[3], buff = .1)
        self.play(
            ShowCreation(framebox1),
        )
        self.wait()
        # The first box is consumed by the transform; only framebox2 remains.
        self.play(
            ReplacementTransform(framebox1,framebox2),
        )
        self.wait()
class MoveFrameBoxCopy(Scene):
    """Like MoveFrameBox, but a copy travels along an arc, keeping the original."""
    def construct(self):
        text=TexMobject(
            "\\frac{d}{dx}f(x)g(x)=","f(x)\\frac{d}{dx}g(x)","+",
            "g(x)\\frac{d}{dx}f(x)"
        )
        self.play(Write(text))
        framebox1 = SurroundingRectangle(text[1], buff = .1)
        framebox2 = SurroundingRectangle(text[3], buff = .1)
        self.play(ShowCreation(framebox1))
        self.wait()
        # path_arc is passed to play(), curving the transform's trajectory.
        self.play(
            ReplacementTransform(framebox1.copy(),framebox2),
            path_arc=-np.pi
        )
        self.wait()
class MoveFrameBoxCopy2(Scene):
    """Copy-transform both the frame box and a separate label above it."""
    def construct(self):
        text=TexMobject(
            "\\frac{d}{dx}f(x)g(x)=","f(x)\\frac{d}{dx}g(x)","+",
            "g(x)\\frac{d}{dx}f(x)"
        )
        self.play(Write(text))
        framebox1 = SurroundingRectangle(text[1], buff = .1)
        framebox2 = SurroundingRectangle(text[3], buff = .1)
        # Labels are free-standing TexMobjects here (not brace.get_text()).
        t1 = TexMobject("g'f")
        t2 = TexMobject("f'g")
        t1.next_to(framebox1, UP, buff=0.1)
        t2.next_to(framebox2, UP, buff=0.1)
        self.play(
            ShowCreation(framebox1),
            FadeIn(t1)
        )
        self.wait()
        self.play(
            ReplacementTransform(framebox1.copy(),framebox2),
            ReplacementTransform(t1.copy(),t2),
        )
        self.wait()
class Arrow1(Scene):
    """Write two step labels and grow an arrow between them, left to right."""
    def construct(self):
        first = TextMobject("Step 1")
        second = TextMobject("Step 2")
        connector = Arrow(LEFT, RIGHT)
        # Lay out: label, arrow, label in a horizontal row.
        first.move_to(LEFT * 2)
        connector.next_to(first, RIGHT, buff=.1)
        second.next_to(connector, RIGHT, buff=.1)
        self.play(Write(first))
        self.wait()
        self.play(GrowArrow(connector))
        self.play(Write(second))
        self.wait()
class Arrow2(Scene):
    """Connect two steps with arrows drawn between different anchor points."""

    def construct(self):
        origin = TextMobject("Step 1")
        destination = TextMobject("Step 2")
        origin.move_to(LEFT * 2 + DOWN * 2)
        destination.move_to(4 * RIGHT + 2 * UP)

        side_arrow = Arrow(origin.get_right(), destination.get_left(), buff=0.1)
        side_arrow.set_color(RED)
        top_arrow = Arrow(origin.get_top(), destination.get_bottom(), buff=0.1)
        top_arrow.set_color(BLUE)

        self.play(Write(origin), Write(destination))
        self.play(GrowArrow(side_arrow))
        self.play(GrowArrow(top_arrow))
        self.wait()
class LineAnimation(Scene):
    """Connect two steps with plain lines drawn between anchor points."""

    def construct(self):
        origin = TextMobject("Step 1")
        destination = TextMobject("Step 2")
        origin.move_to(LEFT * 2 + DOWN * 2)
        destination.move_to(4 * RIGHT + 2 * UP)

        side_line = Line(origin.get_right(), destination.get_left(), buff=0.1)
        side_line.set_color(RED)
        top_line = Line(origin.get_top(), destination.get_bottom(), buff=0.1)
        top_line.set_color(BLUE)

        self.play(Write(origin), Write(destination))
        self.play(ShowCreation(side_line))
        self.play(ShowCreation(top_line))
        self.wait()
class DashedLineAnimation(Scene):
    """Connect two steps with a dashed connector and a solid connector."""

    def construct(self):
        step1 = TextMobject("Step 1")
        step2 = TextMobject("Step 2")
        step1.move_to(LEFT*2+DOWN*2)
        step2.move_to(4*RIGHT+2*UP)
        # Dashed connector from step1's right edge to step2's left edge.
        arrow1 = DashedLine(step1.get_right(),step2.get_left(),buff=0.1)
        arrow1.set_color(RED)
        # NOTE(review): despite the class name, this second connector is a
        # plain Line, not a DashedLine — confirm the contrast is intentional.
        arrow2 = Line(step1.get_top(),step2.get_bottom(),buff=0.1)
        arrow2.set_color(BLUE)
        self.play(Write(step1),Write(step2))
        self.play(ShowCreation(arrow1))
        self.play(ShowCreation(arrow2))
        self.wait()
class LineAnimation2(Scene):
    """Draw a line between two steps, then animate shifting the second step."""

    def construct(self):
        step1 = TextMobject("Step 1")
        step2 = TextMobject("Step 2")
        step1.move_to(LEFT*2+DOWN*2)
        step2.move_to(4*RIGHT+2*UP)
        line = Line(step1.get_right(),step2.get_left(),buff=0.1)
        self.play(Write(step1),Write(step2))
        # NOTE(review): GrowArrow is applied to a Line here; the sibling
        # scenes use ShowCreation for lines — confirm GrowArrow is intended.
        self.play(GrowArrow(line))
        # Old-style method animation: animates step2.next_to(step2, LEFT*2),
        # i.e. repositions step2 relative to its own current location.
        self.play(
            step2.next_to, step2, LEFT*2,
        )
        self.wait()
class LineAnimation3(Scene):
    """Move a step and retarget its connecting line in a single animation."""

    def construct(self):
        start = TextMobject("Step 1")
        end = TextMobject("Step 2")
        shifted_end = end.copy()

        start.move_to(LEFT * 2 + DOWN * 2)
        end.move_to(4 * RIGHT + 2 * UP)
        shifted_end.next_to(end, LEFT * 2)

        connector = Line(start.get_right(), end.get_left(), buff=0.1)
        moved_connector = Line(start.get_right(), shifted_end.get_bottom(), buff=0.1)

        self.play(Write(start), Write(end))
        self.play(GrowArrow(connector))
        # Replace both the step and its connector with their moved versions.
        self.play(
            ReplacementTransform(end, shifted_end),
            ReplacementTransform(connector, moved_connector)
        )
        self.wait()
| 31.893401
| 73
| 0.559764
| 795
| 6,283
| 4.354717
| 0.104403
| 0.076257
| 0.030329
| 0.066724
| 0.802426
| 0.770364
| 0.770364
| 0.738013
| 0.717504
| 0.714905
| 0
| 0.046968
| 0.278211
| 6,283
| 197
| 74
| 31.893401
| 0.716428
| 0.000637
| 0
| 0.66129
| 0
| 0
| 0.069572
| 0.052632
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05914
| false
| 0
| 0.005376
| 0
| 0.123656
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b9781ba01e07b55b239505162cb572b4753f0e46
| 7,114
|
py
|
Python
|
Testing/test_tanks.py
|
mohashrafy/PyNite
|
efffccdbff6727d3b271ba2937e35892d9df8c00
|
[
"MIT"
] | 1
|
2022-01-20T22:13:22.000Z
|
2022-01-20T22:13:22.000Z
|
Testing/test_tanks.py
|
mohashrafy/PyNite
|
efffccdbff6727d3b271ba2937e35892d9df8c00
|
[
"MIT"
] | null | null | null |
Testing/test_tanks.py
|
mohashrafy/PyNite
|
efffccdbff6727d3b271ba2937e35892d9df8c00
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
MIT License
Copyright (c) 2020 D. Craig Brinck, SE; tamalone1
"""
import unittest
from PyNite import FEModel3D
from PyNite.Mesh import CylinderMesh
import math
import sys
from io import StringIO
from numpy import allclose
class Test_Tanks(unittest.TestCase):
    ''' Tests of analyzing plate elements.

    Each test builds a cylindrical tank wall as a finite-element mesh,
    applies hydrostatic pressure, fixes the base, analyzes the model, and
    compares wall bending moments / hoop tension against published PCA
    (and Timoshenko) reference results.
    '''

    def setUp(self):
        # Suppress printed output temporarily
        sys.stdout = StringIO()

    def tearDown(self):
        # Reset the print function to normal
        sys.stdout = sys.__stdout__

    def test_PCA_7_quad(self):
        """
        Tests against the example from Section 7 of "Circular Concrete Tanks
        without Prestressing" by PCA.

        Models the tank wall with quadrilateral (MITC4) elements.
        """

        H = 20     # Tank wall height (ft)
        D = 54     # Tank inside diameter (ft)
        R = D/2    # Tank inside radius (ft)
        t = 10/12  # Tank wall thickness (ft)

        w = 62.5   # Liquid unit weight (pcf)

        fc = 4000                    # Concrete compressive strength (psi)
        E = 57000*(fc)**0.5*(12**2)  # Concrete modulus of elasticity (psf)
        nu = 0.25  #0.17             # Poisson's ratio for concrete

        mesh_size = 1       # Desired mesh size (ft)
        center = [0, 0, 0]  # Origin (X, Y, Z)
        axis = 'Y'          # Axis of revolution
        n_o = 'N1'          # Start node ID
        e_o = 'Q1'          # Start element ID

        tank_mesh = CylinderMesh(t, E, nu, mesh_size, R, H, center, axis, n_o, e_o, element_type='Quad')

        tank_model = FEModel3D()
        tank_model.add_mesh(tank_mesh)

        # Add hydrostatic loads to the elements. Pressure varies linearly
        # with depth below the liquid surface, evaluated at each element's
        # average elevation.
        for element in tank_model.Quads.values():
            avg_Y = (element.i_node.Y + element.j_node.Y
                     + element.m_node.Y + element.n_node.Y)/4
            p = (H - avg_Y)*w
            tank_model.add_quad_surface_pressure(element.Name, p)

        # Add fixed supports to the base
        for node in tank_model.Nodes.values():
            if node.Y == 0:
                tank_model.def_support(node.Name, True, True, True, True, True, True)

        # Analyze the model
        tank_model.analyze()

        # Max/min moment and max hoop tension as determined by PCA.
        # PCA's published values are factored; divide out the 1.3 and 1.7
        # load factors to compare against the unfactored FE results.
        My_max_PCA = 14804/1.3/1.7
        My_min_PCA = -3756/1.3/1.7
        Sx_PCA = 55945/1.3/1.7  # NOTE(review): computed but not asserted below

        # From Timoshenko Section 117 (p. 485)
        # The Timoshenko solution yields similar results to the PCA solution
        beta = (3*(1 - nu**2)/(R**2*t**2))**0.25  # Equation 275
        My_max_Tim = (1 - 1/(beta*H))*w*R*H*t/(12*(1 - nu**2))**0.5   # NOTE(review): unused below
        Qy_max_Tim = -(w*R*H*t)/(12*(1 - nu**2))**0.5*(2*beta - 1/H)  # NOTE(review): unused below

        # Extreme wall moments and hoop tension from the FE solution.
        My_max = max([element.moment(0, 1)[1, 0] for element in tank_model.Quads.values()])
        My_min = min([element.moment(0, 1)[1, 0] for element in tank_model.Quads.values()])
        Sx = max([element.membrane(0, 0)[0, 0] for element in tank_model.Quads.values()])*t

        # MITC4 element corner stresses are unreliable. Use the maximum
        # reaction at the base of the tank instead.
        RMy = max([node.RxnMX['Combo 1'] for node in tank_model.Nodes.values()])/mesh_size

        # Check that the PyNite calculated values are within 2% of expected
        # values.
        self.assertLess(abs(1 - My_max/4900), 0.02, 'Failed quad cylinder flexure test.')
        self.assertLess(abs(1 - RMy/My_max_PCA), 0.02, 'Failed quad cylinder flexure test.')
        self.assertLess(abs(1 - My_min/My_min_PCA), 0.02, 'Failed quad cylinder flexure test.')
        self.assertGreater(My_max, 0, 'Failed quad cylinder sign convention test')
        self.assertLess(abs(1 - Sx/20000), 0.02, 'Failed quad cylinder hoop tension test.')

        # Render the model
        # from PyNite.Visualization import render_model
        # render_model(tank_model, 0.25, True, 100, True, 'My', True, 'Combo 1', labels=False, screenshot=None)

    def test_PCA_7_rect(self):
        """
        Tests against the example from Section 7 of "Circular Concrete Tanks
        without Prestressing" by PCA.

        Models the tank wall with rectangular plate elements (coarser mesh).
        """

        H = 20     # Tank wall height (ft)
        D = 54     # Tank inside diameter (ft)
        R = D/2    # Tank inside radius (ft)
        t = 10/12  # Tank wall thickness (ft)

        w = 62.5   # Liquid unit weight (pcf)

        fc = 4000                    # Concrete compressive strength (psi)
        E = 57000*(fc)**0.5*(12**2)  # Concrete modulus of elasticity (psf)
        nu = 0.25  #0.17             # Poisson's ratio for concrete

        mesh_size = 2       # Desired mesh size (ft)
        center = [0, 0, 0]  # Origin (X, Y, Z)
        axis = 'Y'          # Axis of revolution
        n_o = 'N1'          # Start node ID
        e_o = 'Q1'          # Start element ID

        tank_mesh = CylinderMesh(t, E, nu, mesh_size, R, H, center, axis, n_o, e_o, element_type='Rect')

        tank_model = FEModel3D()
        tank_model.add_mesh(tank_mesh)

        # Add hydrostatic loads to the elements
        for element in tank_model.Plates.values():
            avg_Y = (element.i_node.Y + element.j_node.Y
                     + element.m_node.Y + element.n_node.Y)/4
            p = (H - avg_Y)*w
            tank_model.add_plate_surface_pressure(element.Name, p)

        # Add fixed supports to the base
        for node in tank_model.Nodes.values():
            if node.Y == 0:
                tank_model.def_support(node.Name, True, True, True, True, True, True)

        # Analyze the model
        tank_model.analyze()

        # Max/min moment and max hoop tension as determined by PCA.
        My_max_PCA = 14804/1.3/1.7
        My_min_PCA = -3756/1.3/1.7
        Sx_PCA = 55945/1.3/1.7  # NOTE(review): computed but not asserted below

        # From Timoshenko Section 117 (p. 485)
        # The Timoshenko solution yields similar results to the PCA solution
        beta = (3*(1 - nu**2)/(R**2*t**2))**0.25  # Equation 275
        My_max_Tim = (1 - 1/(beta*H))*w*R*H*t/(12*(1 - nu**2))**0.5   # NOTE(review): unused below
        Qy_max_Tim = -(w*R*H*t)/(12*(1 - nu**2))**0.5*(2*beta - 1/H)  # NOTE(review): unused below

        # Extreme wall moments and hoop tension, sampled at element mid-edges
        # and centers (plate elements give reliable interior results).
        My_max = max([element.moment(element.width()/2, element.height())[1, 0] for element in tank_model.Plates.values()])
        My_min = min([element.moment(element.width()/2, element.height()/2)[1, 0] for element in tank_model.Plates.values()])
        Sx = max([element.membrane(element.width()/2, element.height()/2)[0, 0] for element in tank_model.Plates.values()])*t

        # Check that the PyNite calculated values are within 5% of expected
        # values.
        self.assertLess(abs(1 - My_max/My_max_PCA), 0.05, 'Failed plate cylinder flexure test.')
        self.assertLess(abs(1 - My_min/My_min_PCA), 0.05, 'Failed plate cylinder flexure test.')
        self.assertGreater(My_max, 0, 'Failed plate cylinder sign convention test')
        self.assertLess(abs(1 - Sx/20000), 0.05, 'Failed plate cylinder hoop tension test.')

        # # Render the model
        # from PyNite.Visualization import render_model
        # render_model(tank_model, 0.25, True, 100, True, 'My', True, 'Combo 1', labels=False, screenshot=None)
| 39.303867
| 125
| 0.586168
| 1,051
| 7,114
| 3.85823
| 0.201713
| 0.051048
| 0.02984
| 0.031566
| 0.860173
| 0.844143
| 0.833292
| 0.806905
| 0.769174
| 0.697904
| 0
| 0.058859
| 0.293084
| 7,114
| 180
| 126
| 39.522222
| 0.747465
| 0.304611
| 0
| 0.586957
| 0
| 0
| 0.074636
| 0
| 0
| 0
| 0
| 0
| 0.097826
| 1
| 0.043478
| false
| 0
| 0.076087
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b97a2e592ba95f7caa94d83f76de1d631dcaff77
| 116
|
py
|
Python
|
src/torchspider/__init__.py
|
ghlai9665/octopus
|
6dfe514b29f03fe9549e9f686ba07e7fcecf8ce2
|
[
"Apache-2.0"
] | 1
|
2021-12-02T17:54:05.000Z
|
2021-12-02T17:54:05.000Z
|
src/torchspider/__init__.py
|
ghlai9665/torchspider
|
6dfe514b29f03fe9549e9f686ba07e7fcecf8ce2
|
[
"Apache-2.0"
] | null | null | null |
src/torchspider/__init__.py
|
ghlai9665/torchspider
|
6dfe514b29f03fe9549e9f686ba07e7fcecf8ce2
|
[
"Apache-2.0"
] | null | null | null |
from .callbacks import *
from .learner import *
from .utils import *
from .data import *
from .torch_tools import *
| 19.333333
| 26
| 0.741379
| 16
| 116
| 5.3125
| 0.5
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 116
| 5
| 27
| 23.2
| 0.885417
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b99a2d9078eb043b23ed6f52a25f55643a8323c2
| 4,531
|
py
|
Python
|
pose_estimation/datasets.py
|
arturohernandez10/pose-interpreter-networks
|
b8cfa19bed62bdd9179f8c4a01675cd6644e8f99
|
[
"MIT"
] | 110
|
2018-08-06T01:44:24.000Z
|
2022-01-08T15:46:24.000Z
|
pose_estimation/datasets.py
|
arturohernandez10/pose-interpreter-networks
|
b8cfa19bed62bdd9179f8c4a01675cd6644e8f99
|
[
"MIT"
] | 32
|
2018-08-23T11:11:56.000Z
|
2022-01-09T23:19:06.000Z
|
pose_estimation/datasets.py
|
arturohernandez10/pose-interpreter-networks
|
b8cfa19bed62bdd9179f8c4a01675cd6644e8f99
|
[
"MIT"
] | 37
|
2018-08-06T02:14:54.000Z
|
2022-01-15T21:40:46.000Z
|
import os
import numpy as np
import torch
import torch.utils.data
from skimage.draw import circle
from skimage.measure import find_contours
from PIL import Image
class RenderedPoseDataset(torch.utils.data.Dataset):
    """Rendered object images paired with ground-truth poses.

    Expects, for each object name, a directory
    ``<data_root>/<object>/subset_<num>/`` holding images and a file
    ``<data_root>/<object>/poses/subset_<num>.txt`` holding one pose per image.
    Returns (transformed image, pose, object index) triples.
    """

    def __init__(self, data_root, objects, subset_num, transform):
        self.transform = transform

        # images
        subset_dir = 'subset_{:08}'.format(subset_num)
        image_dirs = [os.path.join(data_root, o, subset_dir) for o in objects]
        for image_dir in image_dirs:
            assert os.path.exists(image_dir)

        self.image_paths = []
        self.object_indices = []
        for i, image_dir in enumerate(image_dirs):
            image_names = sorted(os.listdir(image_dir))
            self.image_paths.extend(os.path.join(image_dir, name) for name in image_names)
            # Remember which object each image belongs to (index into `objects`).
            self.object_indices.extend(i * np.ones(len(image_names)))
        self.object_indices = np.array(self.object_indices, dtype=np.int64)
        assert len(self.object_indices) == len(self.image_paths)

        # poses
        poses_name = 'subset_{:08}.txt'.format(subset_num)
        poses_paths = [os.path.join(data_root, o, 'poses', poses_name) for o in objects]
        for poses_path in poses_paths:
            assert os.path.exists(poses_path)
        self.poses = []
        for poses_path in poses_paths:
            self.poses.extend(np.loadtxt(poses_path).astype(np.float32))
        assert len(self.poses) == len(self.image_paths)

    def __getitem__(self, index):
        object_index = self.object_indices[index]
        image = self.transform(Image.open(self.image_paths[index]))
        # enforce quaternion [w, x, y, z] to have positive w
        target_pose = self.poses[index]
        if target_pose[3] < 0:
            target_pose[3:] = -target_pose[3:]
        return image, target_pose, object_index

    def __len__(self):
        return len(self.image_paths)
class OccludedRenderedPoseDataset(torch.utils.data.Dataset):
    """Like RenderedPoseDataset, but paints a random black disc over the
    object's silhouette boundary to simulate partial occlusion.
    """

    def __init__(self, data_root, objects, subset_num, transform, max_circle_size):
        self.transform = transform
        # Upper bound on the occluding disc radius (randint draws from
        # [0, max_circle_size]).
        self.max_circle_size = max_circle_size

        # images
        image_dirs = []
        self.object_indices = []
        for o in objects:
            image_dirs.append(os.path.join(data_root, o, 'subset_{:08}'.format(subset_num)))
        for image_dir in image_dirs:
            assert os.path.exists(image_dir)
        self.image_paths = []
        for i, image_dir in enumerate(image_dirs):
            image_names = sorted(os.listdir(image_dir))
            self.image_paths.extend([os.path.join(image_dir, name) for name in image_names])
            # Remember which object each image belongs to (index into `objects`).
            self.object_indices.extend(i * np.ones(len(image_names)))
        self.object_indices = np.array(self.object_indices, dtype=np.int64)
        assert len(self.object_indices) == len(self.image_paths)

        # poses
        poses_paths = []
        for o in objects:
            poses_paths.append(os.path.join(data_root, o, 'poses', 'subset_{:08}.txt'.format(subset_num)))
        for poses_path in poses_paths:
            assert os.path.exists(poses_path)
        self.poses = []
        for poses_path in poses_paths:
            self.poses.extend(np.loadtxt(poses_path).astype(np.float32))
        assert len(self.poses) == len(self.image_paths)

    def __getitem__(self, index):
        object_index = self.object_indices[index]
        image = Image.open(self.image_paths[index])

        # if possible, occlude the object
        np_image = np.array(image)
        # Contours at level 0 — presumably the object sits on a black
        # background, so these trace the silhouette; TODO confirm.
        contours = find_contours(np_image.mean(axis=2) if np_image.ndim == 3 else np_image, 0)
        if len(contours) > 0:
            # Take the longest contour (presumably the outer boundary).
            contour = sorted(contours, key=lambda x: -x.shape[0])[0]
            if len(contour) > 0:
                occluded_image = np_image.copy()
                # Center the occluding disc at a random contour point.
                circle_center = contour[np.random.choice(len(contour))]
                r, c = circle_center
                circle_size = np.random.randint(self.max_circle_size + 1)
                # NOTE(review): skimage.draw.circle was removed in newer
                # scikit-image releases in favor of skimage.draw.disk —
                # confirm the pinned scikit-image version.
                rr, cc = circle(r, c, circle_size, shape=np_image.shape)
                occluded_image[rr, cc] = 0
                image = Image.fromarray(occluded_image)

        image = self.transform(image)

        # enforce quaternion [w, x, y, z] to have positive w
        target_pose = self.poses[index]
        if target_pose[3] < 0:
            target_pose[3:] = -target_pose[3:]
        return image, target_pose, object_index

    def __len__(self):
        return len(self.image_paths)
| 37.446281
| 106
| 0.629662
| 608
| 4,531
| 4.460526
| 0.174342
| 0.044248
| 0.075221
| 0.037611
| 0.755162
| 0.755162
| 0.755162
| 0.755162
| 0.755162
| 0.755162
| 0
| 0.00988
| 0.262856
| 4,531
| 120
| 107
| 37.758333
| 0.802096
| 0.035092
| 0
| 0.733333
| 0
| 0
| 0.015124
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 1
| 0.066667
| false
| 0
| 0.077778
| 0.022222
| 0.211111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b99d93df2f1ee405a6f64312b4db595558878a79
| 11,772
|
py
|
Python
|
src/box_coder.py
|
b1xian/jinnan_yolo_baseline
|
539d748d7aa60ab0e3c964eab333af46b806e1db
|
[
"MIT"
] | 2
|
2019-03-27T06:46:59.000Z
|
2019-03-27T08:50:16.000Z
|
src/box_coder.py
|
b1xian/jinnan_yolo_baseline
|
539d748d7aa60ab0e3c964eab333af46b806e1db
|
[
"MIT"
] | null | null | null |
src/box_coder.py
|
b1xian/jinnan_yolo_baseline
|
539d748d7aa60ab0e3c964eab333af46b806e1db
|
[
"MIT"
] | 1
|
2020-10-28T10:08:20.000Z
|
2020-10-28T10:08:20.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author: tshzzz
"""
import numpy as np
import torch
from src.utils import py_cpu_nms,bbox_iou
def gen_yolo_box(featmaps, anchor_wh):
    """Build the per-cell anchor grid for a YOLO feature map.

    Args:
        featmaps: sequence whose first two entries are the grid height and
            width (number of cells).
        anchor_wh: iterable of (w, h) anchor sizes.

    Returns:
        ndarray of shape (H, W, len(anchor_wh), 4) where each entry is
        [cx, cy, w, h] — the cell's column/row index plus the anchor size.
    """
    rows, cols = featmaps[0], featmaps[1]
    grid = np.zeros((rows, cols, len(anchor_wh), 4))
    for row in range(rows):
        for col in range(cols):
            for k, (w, h) in enumerate(anchor_wh):
                grid[row, col, k, :] = [col, row, w, h]
    return grid
class yolo_box_encoder(object):
    """Encode ground-truth boxes into YOLO training targets for one scale.

    Produces per-cell/per-anchor class one-hots, objectness flags, and box
    regression offsets (tx, ty, tw, th).
    """

    def __init__(self,anchor,class_num,featmap_size):
        # anchor B,13,13,5
        self.anchor = gen_yolo_box(featmap_size,anchor)  # (H, W, num_anchors, 4) grid
        self.class_num = class_num
        self.featmap_size = featmap_size
        self.boxes_num = len(anchor)

    def __call__(self,bs):
        # bs: ground-truth array; usage below implies each row is
        # [x, y, w, h, class] in normalized image coordinates — TODO confirm.
        #global tw_a,tw_b
        # b,c,h,w -> b,c,x,y
        bb_class = np.zeros((self.featmap_size[0],self.featmap_size[1],self.boxes_num,self.class_num))
        bb_boxes = np.zeros((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 4))
        bb_conf = np.zeros((self.featmap_size[0],self.featmap_size[1],self.boxes_num,1))

        for i in range(bs.shape[0]):
            # Grid cell containing the box centre, clamped into the map bounds.
            local_x = int(min(0.999, max(0, bs[i, 0] + bs[i, 2] / 2)) * (self.featmap_size[0]) )
            local_y = int(min(0.999, max(0, bs[i, 1] + bs[i, 3] / 2)) * (self.featmap_size[1]) )

            # IoU of each anchor shape against the ground-truth box shape
            # (both placed at the origin, so only width/height matter).
            ious = []
            for k in range(self.boxes_num):
                temp_x,temp_y,temp_w,temp_h = self.anchor[local_y,local_x,k,:]
                temp_w = temp_w / self.featmap_size[0]
                temp_h = temp_h / self.featmap_size[1]
                anchor_ = np.array([[0,0,temp_w,temp_h]])
                gt = np.array([[0,0,bs[i,2],bs[i,3]]])
                ious.append(bbox_iou(anchor_, gt)[0])

            # Try anchors from best to worst IoU; assign the first free one.
            selected_ = np.argsort(ious)[::-1]
            for kk,selected_anchor in enumerate(selected_):
                # Skip anchors already assigned, and boxes that are too small.
                if bb_conf[local_y,local_x, selected_anchor,0] == 0 and bs[i,2]>0.02 and bs[i,3]>0.02 :
                    # Offsets of the box centre from the anchor's cell origin.
                    tx = (bs[i, 0] + bs[i, 2] / 2) * self.featmap_size[0] \
                         - (self.anchor[local_y,local_x,selected_anchor,0] )
                    ty = (bs[i, 1] + bs[i, 3] / 2) * self.featmap_size[1] \
                         - (self.anchor[local_y,local_x,selected_anchor,1] )
                    # Log-space scale of the box relative to the anchor size,
                    # floored at 0.01 to keep log() finite.
                    tw = np.log(max(0.01,bs[i,2]* self.featmap_size[0] / self.anchor[local_y,local_x,selected_anchor,2]) )
                    th = np.log(max(0.01,bs[i,3]* self.featmap_size[1] / self.anchor[local_y,local_x,selected_anchor,3]) )
                    bb_boxes[local_y,local_x, selected_anchor,:] = np.array([tx,ty,tw,th])
                    # Consider the background class; use softmax.
                    #bb_class[local_x, local_y, selected_anchor,:] = 0
                    bb_class[local_y, local_x, selected_anchor, int(bs[i, 4])] = 1
                    bb_conf[local_y,local_x, selected_anchor,0] = 1
                    break

        target = (bb_class,bb_conf,bb_boxes)
        return target
class yolo_box_decoder(object):
    """Decode raw YOLO network outputs for one scale into NMS-filtered
    boxes and per-class scores.
    """

    def __init__(self, anchor, class_num,featmap_size,conf=0.05,nms_thresh=0.5):
        self.class_num = class_num
        self.anchor = torch.from_numpy(gen_yolo_box(featmap_size, anchor)).float()
        self.boxes_num = len(anchor)
        self.featmap_size = featmap_size
        self.conf_thresh = conf
        self.nms_thresh = nms_thresh

    def __call__(self, pred):
        # pred: (pred_cls, pred_conf, pred_bboxes) tensors from the network.
        boxes = []
        classes = []
        pred_cls, pred_conf, pred_bboxes = pred
        featmap_size = torch.Tensor([pred_cls.shape[1], pred_cls.shape[2]])
        # Flatten every prediction to one row per anchor location.
        pred_cls = pred_cls.cpu().float().view(-1,self.class_num)
        pred_conf = pred_conf.cpu().float().view(-1,1)
        pred_bboxes = pred_bboxes.cpu().float().view(-1,4)
        anchor = self.anchor.repeat(1, 1, 1, 1, 1).cpu().view(-1,4)

        # Keep only anchors whose objectness exceeds the confidence threshold.
        pred_mask = (pred_conf>self.conf_thresh).view(-1)
        pred_bboxes = pred_bboxes[pred_mask]
        pred_conf = pred_conf[pred_mask]
        pred_cls = pred_cls[pred_mask]
        anchor = anchor[pred_mask]

        for cls in range(self.class_num):
            # Class score = class probability x objectness.
            cls_prob = pred_cls[:, cls].float() * pred_conf[:, 0]
            mask_a = cls_prob.gt(self.conf_thresh)
            bbox = pred_bboxes[mask_a]
            anchor_ = anchor[mask_a]
            cls_prob = cls_prob[mask_a]
            if bbox.shape[0] > 0:
                # Undo the YOLO parameterization: exp() recovers sizes, adding
                # the cell offsets recovers centres; the result is converted to
                # top-left/width-height in normalized coordinates.
                bbox[:, 2:4] = torch.exp(bbox[:, 2:4]) * anchor_[:, 2:4] / (featmap_size[0:2])
                bbox[:, 0:2] = (bbox[:, 0:2] + (anchor_[:, 0:2]))/ (featmap_size[0:2]) - bbox[:, 2:4] / 2
                #bbox[:, 0:2] = (bbox[:, 0:2] + (anchor_[:, 0:2])) - bbox[:, 2:4] / 2
                pre_cls_box = bbox.data.numpy()
                pre_cls_score = cls_prob.data.view(-1).numpy()
                # Per-class non-maximum suppression.
                keep = py_cpu_nms(pre_cls_box, pre_cls_score, thresh=self.nms_thresh)
                for conf_keep, loc_keep in zip(pre_cls_score[keep], pre_cls_box[keep]):
                    boxes.append(loc_keep)
                    classes.append([cls, conf_keep])
        boxes = np.array(boxes)
        classes = np.array(classes)
        return boxes,classes
class single_decoder(object):
    """Decode raw network outputs for a single feature-map scale.

    Unlike yolo_box_decoder, this returns thresholded tensors without NMS so
    that several scales can be merged first (see group_decoder).
    """

    def __init__(self, anchor, class_num, featmap_size, conf=0.01):
        self.class_num = class_num
        self.anchor = torch.from_numpy(gen_yolo_box(featmap_size, anchor)).float()
        self.boxes_num = len(anchor)
        self.featmap_size = featmap_size
        self.conf_thresh = conf

    def __call__(self, pred):
        # pred: (pred_cls, pred_conf, pred_bboxes) tensors for this scale.
        pred_cls, pred_conf, pred_bboxes = pred
        featmap_size = torch.Tensor([pred_cls.shape[1], pred_cls.shape[2]])
        # Flatten every prediction to one row per anchor location.
        pred_cls = pred_cls.cpu().float().view(-1, self.class_num)
        pred_conf = pred_conf.cpu().float().view(-1, 1)
        pred_bboxes = pred_bboxes.cpu().float().view(-1, 4)
        anchor = self.anchor.repeat(1, 1, 1, 1, 1).cpu().view(-1, 4)

        # Keep only anchors whose objectness exceeds the confidence threshold.
        pred_mask = (pred_conf > self.conf_thresh).view(-1)
        pred_bboxes = pred_bboxes[pred_mask]
        pred_conf = pred_conf[pred_mask]
        pred_cls = pred_cls[pred_mask]
        anchor = anchor[pred_mask]

        # Undo the YOLO parameterization: exp() recovers sizes, adding the
        # cell offsets recovers centres; converted to top-left/width-height
        # in normalized coordinates.
        pred_bboxes[:, 2:4] = torch.exp(pred_bboxes[:, 2:4]) * anchor[:, 2:4] / (featmap_size[0:2])
        pred_bboxes[:, 0:2] = (pred_bboxes[:, 0:2] + (anchor[:, 0:2]))/ (featmap_size[0:2]) - pred_bboxes[:, 2:4] / 2

        return pred_cls, pred_conf, pred_bboxes
class group_decoder(object):
    """Decode and merge predictions from several feature-map scales, then
    run per-class NMS over the combined detections.
    """

    def __init__(self, anchor, class_num, featmap_size, conf=0.01, nms_thresh=0.5):
        # One single_decoder per scale, pairing anchor set and map size.
        self.decoder = []
        for i in range(len(anchor)):
            self.decoder.append(single_decoder(anchor[i], class_num, featmap_size[i], conf))
        self.class_num = class_num
        self.conf_thresh = conf
        self.nms_thresh = nms_thresh

    def __call__(self, preds):
        # preds: one (cls, conf, bboxes) tuple per scale, in decoder order.
        pred_cls = []
        pred_conf = []
        pred_bboxes = []
        for pred,decoder in zip(preds,self.decoder):
            cls,conf,bbox = decoder(pred)
            pred_cls.append(cls)
            pred_conf.append(conf)
            pred_bboxes.append(bbox)
        # Concatenate detections from all scales into single tensors.
        pred_cls = torch.cat([cls for cls in pred_cls])
        pred_bboxes = torch.cat([bbox for bbox in pred_bboxes])
        pred_conf = torch.cat([conf for conf in pred_conf])

        boxes = []
        classes = []
        for cls in range(self.class_num):
            # Class score = class probability x objectness.
            cls_prob = pred_cls[:, cls].float() * pred_conf[:, 0]
            mask_a = cls_prob.gt(self.conf_thresh)
            bbox = pred_bboxes[mask_a]
            cls_prob = cls_prob[mask_a]
            iou_prob = pred_conf[mask_a]
            if bbox.shape[0] > 0:
                pre_cls_box = bbox.data.numpy()
                pre_cls_score = cls_prob.data.view(-1).numpy()
                # NOTE(review): iou_prob is computed but never used below —
                # looks like dead code.
                iou_prob = iou_prob.data.view(-1).numpy()
                # Per-class non-maximum suppression.
                keep = py_cpu_nms(pre_cls_box, pre_cls_score, thresh=self.nms_thresh)
                for conf_keep, loc_keep in zip(pre_cls_score[keep], pre_cls_box[keep]):
                    boxes.append(loc_keep)
                    classes.append([cls, conf_keep])
        boxes = np.array(boxes)
        classes = np.array(classes)
        return boxes, classes
class single_encoder(object):
    """Encode ground-truth boxes into YOLO targets for one scale.

    Stateful: targets accumulate into bb_class/bb_conf/bb_boxes across calls
    until clean_target() resets them (see group_encoder for the usage pattern).
    """

    def __init__(self, anchor, class_num, featmap_size):
        # anchor B,13,13,5
        self.anchor = gen_yolo_box(featmap_size, anchor)
        self.class_num = class_num
        self.featmap_size = featmap_size
        self.boxes_num = len(anchor)
        # Accumulated targets: class one-hots, regression offsets, objectness.
        self.bb_class = np.zeros((self.featmap_size[0], self.featmap_size[1], self.boxes_num, self.class_num))
        self.bb_boxes = np.zeros((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 4))
        self.bb_conf = np.zeros((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 1))

    def get_target(self):
        # Return the accumulated (class, conf, boxes) target tuple.
        return (self.bb_class,self.bb_conf,self.bb_boxes)

    def clean_target(self):
        # Reset all accumulated targets to zeros for the next sample.
        self.bb_class = np.zeros((self.featmap_size[0], self.featmap_size[1], self.boxes_num, self.class_num))
        self.bb_boxes = np.zeros((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 4))
        self.bb_conf = np.zeros((self.featmap_size[0], self.featmap_size[1], self.boxes_num, 1))
        return

    def __call__(self, bs):
        # bs: one ground-truth row; usage below implies [x, y, w, h, class]
        # in normalized image coordinates — TODO confirm against the caller.
        # Grid cell containing the box centre, clamped into the map bounds.
        local_x = int(min(0.999, max(0, bs[0] + bs[2] / 2)) * (self.featmap_size[0]))
        local_y = int(min(0.999, max(0, bs[1] + bs[3] / 2)) * (self.featmap_size[1]))

        # IoU of each anchor shape against the box shape (both at the origin,
        # so only width/height matter).
        ious = []
        for k in range(self.boxes_num):
            temp_x, temp_y, temp_w, temp_h = self.anchor[local_y, local_x, k, :]
            temp_w = temp_w / self.featmap_size[0]
            temp_h = temp_h / self.featmap_size[1]
            anchor_ = np.array([[0, 0, temp_w, temp_h]])
            gt = np.array([[0, 0, bs[2], bs[3]]])
            ious.append(bbox_iou(anchor_, gt)[0])

        # Try anchors from best to worst IoU; assign the first free one.
        selected_ = np.argsort(ious)[::-1]
        for kk, selected_anchor in enumerate(selected_):
            # Skip anchors already assigned, and boxes that are too small.
            if self.bb_conf[local_y, local_x, selected_anchor, 0] == 0 and bs[2] > 0.02 and bs[3] > 0.02:
                # Offsets of the box centre from the anchor's cell origin.
                tx = (bs[0] + bs[2] / 2) * self.featmap_size[0] - (self.anchor[local_y, local_x, selected_anchor, 0])
                ty = (bs[1] + bs[3] / 2) * self.featmap_size[1] - (self.anchor[local_y, local_x, selected_anchor, 1])
                # Log-space scale relative to the anchor size, floored at 0.01
                # to keep log() finite.
                tw = np.log(max(0.01, bs[2] * self.featmap_size[0] / self.anchor[local_y, local_x, selected_anchor, 2]))
                th = np.log(max(0.01, bs[3] * self.featmap_size[1] / self.anchor[local_y, local_x, selected_anchor, 3]))
                self.bb_boxes[local_y, local_x, selected_anchor, :] = np.array([tx, ty, tw, th])
                # Consider the background class; use softmax.
                self.bb_class[local_y, local_x, selected_anchor, int(bs[4])] = 1
                self.bb_conf[local_y, local_x, selected_anchor, 0] = 1
                break
        return
class group_encoder(object):
    """Encode ground-truth boxes for several feature-map scales at once."""

    def __init__(self, anchor, class_num, featmap_size):
        # anchor B,13,13,5
        self.anchor = anchor
        self.class_num = class_num
        self.featmap_size = featmap_size
        self.boxes_num = len(anchor)
        self.featmap_num = len(featmap_size)
        # One single_encoder per scale, pairing anchor set and map size.
        self.encoder = [
            single_encoder(anchor[i], class_num, featmap_size[i])
            for i in range(len(anchor))
        ]

    def __call__(self, bs):
        # global tw_a,tw_b
        # b,c,h,w -> b,c,x,y
        # Feed every ground-truth row to every per-scale encoder.
        for row in range(bs.shape[0]):
            for enc in self.encoder:
                enc(bs[row])
        # Collect the accumulated targets, then reset encoder state so the
        # next call starts clean.
        target = [enc.get_target() for enc in self.encoder]
        for enc in self.encoder:
            enc.clean_target()
        return target
| 34.725664
| 122
| 0.577557
| 1,757
| 11,772
| 3.6107
| 0.071144
| 0.109237
| 0.092213
| 0.034048
| 0.836538
| 0.827711
| 0.794609
| 0.766236
| 0.759458
| 0.737863
| 0
| 0.032727
| 0.281006
| 11,772
| 338
| 123
| 34.828402
| 0.716801
| 0.034404
| 0
| 0.605769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072115
| false
| 0
| 0.014423
| 0.004808
| 0.158654
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b9b5d8c5495c5167d5ea31c14461d5e293218f5c
| 124
|
py
|
Python
|
dynophores/viz/plot/__init__.py
|
nadja-mansurov/dynophores
|
7d030170ab1af908730f960f3884048c36d8ef7a
|
[
"MIT"
] | null | null | null |
dynophores/viz/plot/__init__.py
|
nadja-mansurov/dynophores
|
7d030170ab1af908730f960f3884048c36d8ef7a
|
[
"MIT"
] | null | null | null |
dynophores/viz/plot/__init__.py
|
nadja-mansurov/dynophores
|
7d030170ab1af908730f960f3884048c36d8ef7a
|
[
"MIT"
] | null | null | null |
"""
Dynophores
Dynamic pharmacophore modeling of molecular interactions
"""
from . import static
from . import interactive
| 15.5
| 56
| 0.790323
| 13
| 124
| 7.538462
| 0.846154
| 0.204082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145161
| 124
| 7
| 57
| 17.714286
| 0.924528
| 0.540323
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6a028ac4669676109c2d6cecb2ce880cd6753964
| 47
|
py
|
Python
|
api/tasks/__init__.py
|
ohduran/test-cookiecutter-django
|
449a3b0e8f36ef0c0df9ba17eda9cca593372d50
|
[
"MIT"
] | null | null | null |
api/tasks/__init__.py
|
ohduran/test-cookiecutter-django
|
449a3b0e8f36ef0c0df9ba17eda9cca593372d50
|
[
"MIT"
] | null | null | null |
api/tasks/__init__.py
|
ohduran/test-cookiecutter-django
|
449a3b0e8f36ef0c0df9ba17eda9cca593372d50
|
[
"MIT"
] | null | null | null |
from .selenium import *
from .example import *
| 15.666667
| 23
| 0.744681
| 6
| 47
| 5.833333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 24
| 23.5
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6a03894c74ef45db9668dd02387f98a7bc50487d
| 41
|
py
|
Python
|
ukb/metrics/__init__.py
|
wi905252/ukb-cardiac-mri
|
3177dde898a65b1d7f385b78e4f134de3852bea5
|
[
"Apache-2.0"
] | 19
|
2018-05-30T22:13:17.000Z
|
2022-01-18T14:04:40.000Z
|
ukb/metrics/__init__.py
|
wi905252/ukb-cardiac-mri
|
3177dde898a65b1d7f385b78e4f134de3852bea5
|
[
"Apache-2.0"
] | 1
|
2019-08-07T07:29:07.000Z
|
2019-08-07T08:54:10.000Z
|
ukb/metrics/__init__.py
|
wi905252/ukb-cardiac-mri
|
3177dde898a65b1d7f385b78e4f134de3852bea5
|
[
"Apache-2.0"
] | 8
|
2019-07-03T23:19:43.000Z
|
2021-11-15T17:09:24.000Z
|
from .base import *
from .fbeta import *
| 13.666667
| 20
| 0.707317
| 6
| 41
| 4.833333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195122
| 41
| 2
| 21
| 20.5
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6a05e4a97e2c9609799e1e7e956f7eccd88218f9
| 42
|
py
|
Python
|
tf_model_zoo/__init__.py
|
Nullius-2020/ECO-pp
|
3e1053368a58a884abf3f1558bb106f200708baa
|
[
"BSD-2-Clause"
] | 3
|
2020-11-26T07:50:02.000Z
|
2021-03-06T12:22:15.000Z
|
tf_model_zoo/__init__.py
|
Nullius-2020/ECO-pp
|
3e1053368a58a884abf3f1558bb106f200708baa
|
[
"BSD-2-Clause"
] | null | null | null |
tf_model_zoo/__init__.py
|
Nullius-2020/ECO-pp
|
3e1053368a58a884abf3f1558bb106f200708baa
|
[
"BSD-2-Clause"
] | null | null | null |
from .ECOfull.pytorch_load import ECOfull
| 21
| 41
| 0.857143
| 6
| 42
| 5.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 42
| 1
| 42
| 42
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6a11c8e687d4dfb3b27c3a5eadb6671ad90ca3ea
| 1,409
|
py
|
Python
|
aibg-ai/workstation/workgroups/views.py
|
BalderOdinson/ai-battleground-environment
|
b5a0a21ee90df113d34ab0f821ab9722007cc25c
|
[
"MIT"
] | null | null | null |
aibg-ai/workstation/workgroups/views.py
|
BalderOdinson/ai-battleground-environment
|
b5a0a21ee90df113d34ab0f821ab9722007cc25c
|
[
"MIT"
] | 1
|
2021-09-02T07:58:16.000Z
|
2021-09-02T07:58:16.000Z
|
aibg-ai/workstation/workgroups/views.py
|
BalderOdinson/ai-battleground-environment
|
b5a0a21ee90df113d34ab0f821ab9722007cc25c
|
[
"MIT"
] | null | null | null |
import json
from django.http import HttpRequest, JsonResponse
from connect.models import Connection
from .models import Workgroup
def schedule(request: HttpRequest):
    """Allocate a workgroup, run a worker method on it, then free it.

    Expects a JSON body with keys: worker_id, script, className, methodName,
    args. Returns 401 when the connection is not authorized, 400 when the
    body is malformed JSON or missing a required key, and a success message
    otherwise.
    """
    if not Connection.authorize(request):
        return JsonResponse({}, status=401)
    try:
        payload = json.loads(request.body)
        worker_id = payload['worker_id']
        script = payload['script']
        class_name = payload['className']
        method_name = payload['methodName']
        args = payload['args']
        # NOTE(review): if Workgroup.work() raises, free_workgroup is never
        # called and the workgroup stays allocated — confirm whether that is
        # intended or a try/finally is needed.
        Workgroup.allocate_workgroup(worker_id)
        Workgroup.work(script, class_name, method_name, args)
        Workgroup.free_workgroup(worker_id)
        return JsonResponse({
            "message": "success"
        })
    except (ValueError, KeyError) as err:
        # Bug fix: JsonResponse's second positional argument is the JSON
        # encoder, not the status code, and an exception object is not JSON
        # serializable — the old `JsonResponse(err, 400)` crashed. Also catch
        # KeyError so a missing payload key yields 400 instead of 500.
        return JsonResponse({"message": str(err)}, status=400)
def schedule_game(request: HttpRequest):
    """Schedule a game run on a workgroup.

    Expects a JSON body with keys 'worker_id', 'script', 'className' and
    'args'. Returns 401 when the connection is not authorized, 400 for a
    malformed or incomplete payload, and a {"message": "success"} 200
    response otherwise.
    """
    if not Connection.authorize(request):
        return JsonResponse({}, status=401)
    try:
        payload = json.loads(request.body)
        worker_id = payload['worker_id']
        script = payload['script']
        class_name = payload['className']
        args = payload['args']
        Workgroup.allocate_workgroup(worker_id)
        try:
            Workgroup.game(script, class_name, args)
        finally:
            # BUG FIX: previously the workgroup was never freed if game()
            # raised, leaking the allocation.
            Workgroup.free_workgroup(worker_id)
        return JsonResponse({"message": "success"})
    # ValueError covers invalid JSON; KeyError covers missing payload fields.
    except (ValueError, KeyError) as err:
        # BUG FIX: JsonResponse(err, 400) passed the exception object as the
        # response body and 400 as the positional `encoder` argument, raising
        # TypeError instead of returning an HTTP 400.
        return JsonResponse({"message": str(err)}, status=400)
| 31.311111
| 61
| 0.649397
| 148
| 1,409
| 6.054054
| 0.297297
| 0.071429
| 0.066964
| 0.051339
| 0.772321
| 0.772321
| 0.772321
| 0.772321
| 0.772321
| 0.642857
| 0
| 0.011396
| 0.252661
| 1,409
| 45
| 62
| 31.311111
| 0.839506
| 0
| 0
| 0.769231
| 0
| 0
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051282
| false
| 0
| 0.102564
| 0
| 0.307692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6a3067acc06b2a7d76bb40199bc3275b6824984d
| 24,713
|
py
|
Python
|
tests/test_categorical_encoder.py
|
GLevV/feature_engine
|
c5f6d52ead2dbf86f03660d18db2bf62cd4e7024
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_categorical_encoder.py
|
GLevV/feature_engine
|
c5f6d52ead2dbf86f03660d18db2bf62cd4e7024
|
[
"BSD-3-Clause"
] | 1
|
2020-08-12T15:13:33.000Z
|
2020-08-12T15:13:33.000Z
|
tests/test_categorical_encoder.py
|
michalgromiec/feature_engine
|
7fa47cc7b305f5315282e8fc94bf4ed31b67ce9c
|
[
"BSD-3-Clause"
] | null | null | null |
# Authors: Soledad Galli <solegalli1@gmail.com>
# License: BSD 3 clause
import pytest
import pandas as pd
from sklearn.exceptions import NotFittedError
from feature_engine.categorical_encoders import CountFrequencyCategoricalEncoder
from feature_engine.categorical_encoders import OrdinalCategoricalEncoder
from feature_engine.categorical_encoders import MeanCategoricalEncoder
from feature_engine.categorical_encoders import WoERatioCategoricalEncoder
from feature_engine.categorical_encoders import OneHotCategoricalEncoder
from feature_engine.categorical_encoders import RareLabelCategoricalEncoder
def test_CountFrequencyCategoricalEncoder(dataframe_enc, dataframe_enc_rare, dataframe_enc_na):
    """Count/frequency encoder: happy paths, bad init args and error paths.

    The dataframe_enc* arguments are supplied by the test harness
    (presumably pytest fixtures from a conftest.py — not visible here).
    """
    # test case 1: 1 variable, counts
    encoder = CountFrequencyCategoricalEncoder(encoding_method='count', variables=['var_A'])
    X = encoder.fit_transform(dataframe_enc)
    # transformed dataframe: category counts replace the labels in var_A
    transf_df = dataframe_enc.copy()
    transf_df['var_A'] = [6, 6, 6, 6, 6, 6, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 4, 4, 4, 4]
    # init params
    assert encoder.encoding_method == 'count'
    assert encoder.variables == ['var_A']
    # fit params
    assert encoder.encoder_dict_ == {'var_A': {'A': 6, 'B': 10, 'C': 4}}
    assert encoder.input_shape_ == (20, 3)
    # transform params
    pd.testing.assert_frame_equal(X, transf_df)
    # test case 2: automatically select variables, frequency
    encoder = CountFrequencyCategoricalEncoder(encoding_method='frequency', variables=None)
    X = encoder.fit_transform(dataframe_enc)
    # transformed dataframe: category relative frequencies replace the labels
    transf_df['var_A'] = [0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5,
                          0.5, 0.5, 0.5, 0.5, 0.2, 0.2, 0.2, 0.2]
    transf_df['var_B'] = [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.3, 0.3,
                          0.3, 0.3, 0.3, 0.3, 0.2, 0.2, 0.2, 0.2]
    # init params
    assert encoder.encoding_method == 'frequency'
    assert encoder.variables == ['var_A', 'var_B']
    # fit params
    assert encoder.encoder_dict_ == {'var_A': {'A': 0.3, 'B': 0.5, 'C': 0.2},
                                     'var_B': {'A': 0.5, 'B': 0.3, 'C': 0.2}}
    assert encoder.input_shape_ == (20, 3)
    # transform params
    pd.testing.assert_frame_equal(X, transf_df)
    # an unknown encoding_method must be rejected at construction time
    with pytest.raises(ValueError):
        CountFrequencyCategoricalEncoder(encoding_method='arbitrary')
    # test case 3: when dataset to be transformed contains categories not present in training dataset
    with pytest.warns(UserWarning):
        encoder = CountFrequencyCategoricalEncoder()
        encoder.fit(dataframe_enc)
        encoder.transform(dataframe_enc_rare)
    # test case 4: when dataset contains na, fit method
    with pytest.raises(ValueError):
        encoder = CountFrequencyCategoricalEncoder()
        encoder.fit(dataframe_enc_na)
    # test case 5: when dataset contains na, transform method
    with pytest.raises(ValueError):
        encoder = CountFrequencyCategoricalEncoder()
        encoder.fit(dataframe_enc)
        encoder.transform(dataframe_enc_na)
    # test case 6: transforming before fit raises NotFittedError
    with pytest.raises(NotFittedError):
        imputer = CountFrequencyCategoricalEncoder()
        imputer.transform(dataframe_enc)
def test_OrdinalCategoricalEncoder(dataframe_enc, dataframe_enc_rare, dataframe_enc_na):
    """Ordinal encoder: ordered/arbitrary encoding, bad init args and error paths.

    The dataframe_enc* arguments are supplied by the test harness
    (presumably pytest fixtures from a conftest.py — not visible here).
    """
    # test case 1: 1 variable, ordered encoding (ordered by target mean)
    encoder = OrdinalCategoricalEncoder(encoding_method='ordered', variables=['var_A'])
    encoder.fit(dataframe_enc[['var_A', 'var_B']], dataframe_enc['target'])
    X = encoder.transform(dataframe_enc[['var_A', 'var_B']])
    # transformed dataframe
    transf_df = dataframe_enc.copy()
    transf_df['var_A'] = [1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2]
    # init params
    assert encoder.encoding_method == 'ordered'
    assert encoder.variables == ['var_A']
    # fit params
    assert encoder.encoder_dict_ == {'var_A': {'A': 1, 'B': 0, 'C': 2}}
    assert encoder.input_shape_ == (20, 2)
    # transform params
    pd.testing.assert_frame_equal(X, transf_df[['var_A', 'var_B']])
    # test case 2: automatically select variables, unordered (arbitrary) encoding
    encoder = OrdinalCategoricalEncoder(encoding_method='arbitrary', variables=None)
    X = encoder.fit_transform(dataframe_enc)
    # transformed dataframe: integers assigned in order of appearance
    transf_df['var_A'] = [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2]
    transf_df['var_B'] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2]
    # init params
    assert encoder.encoding_method == 'arbitrary'
    assert encoder.variables == ['var_A', 'var_B']
    # fit params
    assert encoder.encoder_dict_ == {'var_A': {'A': 0, 'B': 1, 'C': 2},
                                     'var_B': {'A': 0, 'B': 1, 'C': 2}}
    assert encoder.input_shape_ == (20, 3)
    # transform params
    pd.testing.assert_frame_equal(X, transf_df)
    # an unknown encoding_method must be rejected at construction time
    with pytest.raises(ValueError):
        OrdinalCategoricalEncoder(encoding_method='other')
    # test case 3: raises error if target is not passed (required for 'ordered')
    with pytest.raises(ValueError):
        encoder = OrdinalCategoricalEncoder(encoding_method='ordered')
        encoder.fit(dataframe_enc)
    # test case 4: when dataset to be transformed contains categories not present in training dataset
    with pytest.warns(UserWarning):
        encoder = OrdinalCategoricalEncoder(encoding_method='arbitrary')
        encoder.fit(dataframe_enc)
        encoder.transform(dataframe_enc_rare)
    # transforming before fit raises NotFittedError
    with pytest.raises(NotFittedError):
        imputer = OrdinalCategoricalEncoder()
        imputer.transform(dataframe_enc)
    # test case 5: when dataset contains na, fit method
    with pytest.raises(ValueError):
        encoder = OrdinalCategoricalEncoder(encoding_method='arbitrary')
        encoder.fit(dataframe_enc_na)
    # test case 6: when dataset contains na, transform method
    with pytest.raises(ValueError):
        encoder = OrdinalCategoricalEncoder(encoding_method='arbitrary')
        encoder.fit(dataframe_enc)
        encoder.transform(dataframe_enc_na)
def test_MeanCategoricalEncoder(dataframe_enc, dataframe_enc_rare, dataframe_enc_na):
    """Target-mean encoder: single variable, auto-selection and error paths.

    The dataframe_enc* arguments are supplied by the test harness
    (presumably pytest fixtures from a conftest.py — not visible here).

    BUG FIX: the NotFittedError case previously instantiated
    OrdinalCategoricalEncoder (copy-paste from the ordinal test), so the
    Mean encoder's not-fitted behavior was never exercised.
    """
    # test case 1: 1 variable
    encoder = MeanCategoricalEncoder(variables=['var_A'])
    encoder.fit(dataframe_enc[['var_A', 'var_B']], dataframe_enc['target'])
    X = encoder.transform(dataframe_enc[['var_A', 'var_B']])
    # transformed dataframe: per-category target means replace the labels
    transf_df = dataframe_enc.copy()
    transf_df['var_A'] = [0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333,
                          0.3333333333333333, 0.3333333333333333, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2,
                          0.2, 0.2, 0.5, 0.5, 0.5, 0.5]
    # init params
    assert encoder.variables == ['var_A']
    # fit params
    assert encoder.encoder_dict_ == {'var_A': {'A': 0.3333333333333333, 'B': 0.2, 'C': 0.5}}
    assert encoder.input_shape_ == (20, 2)
    # transform params
    pd.testing.assert_frame_equal(X, transf_df[['var_A', 'var_B']])
    # test case 2: automatically select variables
    encoder = MeanCategoricalEncoder(variables=None)
    encoder.fit(dataframe_enc[['var_A', 'var_B']], dataframe_enc['target'])
    X = encoder.transform(dataframe_enc[['var_A', 'var_B']])
    # transformed dataframe
    transf_df['var_A'] = [0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333,
                          0.3333333333333333, 0.3333333333333333, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2,
                          0.2, 0.2, 0.5, 0.5, 0.5, 0.5]
    transf_df['var_B'] = [0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.3333333333333333, 0.3333333333333333,
                          0.3333333333333333, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333,
                          0.5, 0.5, 0.5, 0.5]
    # init params
    assert encoder.variables == ['var_A', 'var_B']
    # fit params
    assert encoder.encoder_dict_ == {'var_A': {'A': 0.3333333333333333, 'B': 0.2, 'C': 0.5},
                                     'var_B': {'A': 0.2, 'B': 0.3333333333333333, 'C': 0.5}}
    assert encoder.input_shape_ == (20, 2)
    # transform params
    pd.testing.assert_frame_equal(X, transf_df[['var_A', 'var_B']])
    # test case 3: raises error if target is not passed
    with pytest.raises(TypeError):
        encoder = MeanCategoricalEncoder()
        encoder.fit(dataframe_enc)
    # test case 4: when dataset to be transformed contains categories not present in training dataset
    with pytest.warns(UserWarning):
        encoder = MeanCategoricalEncoder()
        encoder.fit(dataframe_enc[['var_A', 'var_B']], dataframe_enc['target'])
        encoder.transform(dataframe_enc_rare[['var_A', 'var_B']])
    # test case 5: when dataset contains na, fit method
    with pytest.raises(ValueError):
        encoder = MeanCategoricalEncoder()
        encoder.fit(dataframe_enc_na[['var_A', 'var_B']], dataframe_enc_na['target'])
    # test case 6: when dataset contains na, transform method
    with pytest.raises(ValueError):
        encoder = MeanCategoricalEncoder()
        encoder.fit(dataframe_enc[['var_A', 'var_B']], dataframe_enc['target'])
        encoder.transform(dataframe_enc_na)
    # test case 7: transforming before fit raises NotFittedError
    with pytest.raises(NotFittedError):
        encoder = MeanCategoricalEncoder()
        encoder.transform(dataframe_enc)
def test_WoERatioCategoricalEncoder(dataframe_enc, dataframe_enc_rare, dataframe_enc_na):
    """WoE/ratio encoder: ratio, log_ratio and woe encodings plus error paths.

    The dataframe_enc* arguments are supplied by the test harness
    (presumably pytest fixtures from a conftest.py — not visible here).
    """
    # test case 1: 1 variable, ratio
    encoder = WoERatioCategoricalEncoder(encoding_method='ratio', variables=['var_A'])
    encoder.fit(dataframe_enc[['var_A', 'var_B']], dataframe_enc['target'])
    X = encoder.transform(dataframe_enc[['var_A', 'var_B']])
    # transformed dataframe
    transf_df = dataframe_enc.copy()
    transf_df['var_A'] = [0.49999999999999994, 0.49999999999999994, 0.49999999999999994, 0.49999999999999994,
                          0.49999999999999994, 0.49999999999999994, 0.25, 0.25,
                          0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 0.25, 1.0, 1.0, 1.0, 1.0]
    # init params
    assert encoder.encoding_method == 'ratio'
    assert encoder.variables == ['var_A']
    # fit params
    assert encoder.encoder_dict_ == {'var_A': {'A': 0.49999999999999994, 'B': 0.25, 'C': 1.0}}
    assert encoder.input_shape_ == (20, 2)
    # transform params
    pd.testing.assert_frame_equal(X, transf_df[['var_A', 'var_B']])
    # test case 2: automatically select variables, log_ratio
    encoder = WoERatioCategoricalEncoder(encoding_method='log_ratio', variables=None)
    encoder.fit(dataframe_enc[['var_A', 'var_B']], dataframe_enc['target'])
    X = encoder.transform(dataframe_enc[['var_A', 'var_B']])
    # transformed dataframe
    transf_df['var_A'] = [-0.6931471805599454, -0.6931471805599454, -0.6931471805599454, -0.6931471805599454,
                          -0.6931471805599454, -0.6931471805599454, -1.3862943611198906, -1.3862943611198906,
                          -1.3862943611198906, -1.3862943611198906, -1.3862943611198906, -1.3862943611198906,
                          -1.3862943611198906, -1.3862943611198906, -1.3862943611198906, -1.3862943611198906,
                          0.0, 0.0, 0.0, 0.0]
    transf_df['var_B'] = [-1.3862943611198906, -1.3862943611198906, -1.3862943611198906, -1.3862943611198906,
                          -1.3862943611198906, -1.3862943611198906, -1.3862943611198906, -1.3862943611198906,
                          -1.3862943611198906, -1.3862943611198906, -0.6931471805599454, -0.6931471805599454,
                          -0.6931471805599454, -0.6931471805599454, -0.6931471805599454, -0.6931471805599454,
                          0.0, 0.0, 0.0, 0.0]
    # init params
    assert encoder.encoding_method == 'log_ratio'
    assert encoder.variables == ['var_A', 'var_B']
    # fit params
    assert encoder.encoder_dict_ == {'var_A': {'A': -0.6931471805599454, 'B': -1.3862943611198906, 'C': 0.0},
                                     'var_B': {'A': -1.3862943611198906, 'B': -0.6931471805599454, 'C': 0.0}}
    assert encoder.input_shape_ == (20, 2)
    # transform params
    pd.testing.assert_frame_equal(X, transf_df[['var_A', 'var_B']])
    # test case 3: automatically select variables, woe
    encoder = WoERatioCategoricalEncoder(encoding_method='woe', variables=None)
    encoder.fit(dataframe_enc[['var_A', 'var_B']], dataframe_enc['target'])
    X = encoder.transform(dataframe_enc[['var_A', 'var_B']])
    # transformed dataframe
    transf_df['var_A'] = [0.15415067982725836, 0.15415067982725836, 0.15415067982725836, 0.15415067982725836,
                          0.15415067982725836, 0.15415067982725836, -0.5389965007326869, -0.5389965007326869,
                          -0.5389965007326869, -0.5389965007326869, -0.5389965007326869, -0.5389965007326869,
                          -0.5389965007326869, -0.5389965007326869, -0.5389965007326869, -0.5389965007326869,
                          0.8472978603872037, 0.8472978603872037, 0.8472978603872037, 0.8472978603872037]
    transf_df['var_B'] = [-0.5389965007326869, -0.5389965007326869, -0.5389965007326869, -0.5389965007326869,
                          -0.5389965007326869, -0.5389965007326869, -0.5389965007326869, -0.5389965007326869,
                          -0.5389965007326869, -0.5389965007326869, 0.15415067982725836, 0.15415067982725836,
                          0.15415067982725836, 0.15415067982725836, 0.15415067982725836, 0.15415067982725836,
                          0.8472978603872037, 0.8472978603872037, 0.8472978603872037, 0.8472978603872037]
    # init params
    assert encoder.encoding_method == 'woe'
    assert encoder.variables == ['var_A', 'var_B']
    # fit params
    assert encoder.encoder_dict_ == {'var_A': {'A': 0.15415067982725836, 'B': -0.5389965007326869, 'C': 0.8472978603872037},
                                     'var_B': {'A': -0.5389965007326869, 'B': 0.15415067982725836, 'C': 0.8472978603872037}}
    assert encoder.input_shape_ == (20, 2)
    # transform params
    pd.testing.assert_frame_equal(X, transf_df[['var_A', 'var_B']])
    # test error raise: unknown encoding_method rejected at construction time
    with pytest.raises(ValueError):
        WoERatioCategoricalEncoder(encoding_method='other')
    # test case 4: raises error if target is not passed
    with pytest.raises(TypeError):
        encoder = WoERatioCategoricalEncoder(encoding_method='woe')
        encoder.fit(dataframe_enc)
    # test case 5: when dataset to be transformed contains categories not present in training dataset
    with pytest.warns(UserWarning):
        encoder = WoERatioCategoricalEncoder(encoding_method='woe')
        encoder.fit(dataframe_enc[['var_A', 'var_B']], dataframe_enc['target'])
        encoder.transform(dataframe_enc_rare[['var_A', 'var_B']])
    # test case 6: the target is not binary
    with pytest.raises(ValueError):
        df = {'var_A': ['A'] * 6 + ['B'] * 10 + ['C'] * 4,
              'var_B': ['A'] * 10 + ['B'] * 6 + ['C'] * 4,
              'target': [1, 1, 2, 2, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0]}
        df = pd.DataFrame(df)
        encoder = WoERatioCategoricalEncoder(encoding_method='woe')
        encoder.fit(df[['var_A', 'var_B']], df['target'])
    # test case 7: when the denominator probability is zero, ratio
    with pytest.raises(ValueError):
        df = {'var_A': ['A'] * 6 + ['B'] * 10 + ['C'] * 4,
              'var_B': ['A'] * 10 + ['B'] * 6 + ['C'] * 4,
              'target': [1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0]}
        df = pd.DataFrame(df)
        encoder = WoERatioCategoricalEncoder(encoding_method='ratio')
        encoder.fit(df[['var_A', 'var_B']], df['target'])
    # test case 8: when the denominator probability is zero, log_ratio
    with pytest.raises(ValueError):
        df = {'var_A': ['A'] * 6 + ['B'] * 10 + ['C'] * 4,
              'var_B': ['A'] * 10 + ['B'] * 6 + ['C'] * 4,
              'target': [1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0]}
        df = pd.DataFrame(df)
        encoder = WoERatioCategoricalEncoder(encoding_method='log_ratio')
        encoder.fit(df[['var_A', 'var_B']], df['target'])
    # test case 9: when the numerator probability is zero, only applies to log_ratio
    with pytest.raises(ValueError):
        df = {'var_A': ['A'] * 6 + ['B'] * 10 + ['C'] * 4,
              'var_B': ['A'] * 10 + ['B'] * 6 + ['C'] * 4,
              'target': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0]}
        df = pd.DataFrame(df)
        encoder = WoERatioCategoricalEncoder(encoding_method='log_ratio')
        encoder.fit(df[['var_A', 'var_B']], df['target'])
    # NOTE(review): cases 10 and 11 below are deliberately disabled — presumably
    # the woe implementation does not raise for zero numerator/denominator; confirm
    # against the library before re-enabling.
    # # test case 10: when the numerator probability is zero, woe
    # with pytest.raises(ValueError):
    #     df = {'var_A': ['A'] * 6 + ['B'] * 10 + ['C'] * 4,
    #           'var_B': ['A'] * 10 + ['B'] * 6 + ['C'] * 4,
    #           'target': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0]}
    #     df = pd.DataFrame(df)
    #     encoder = WoERatioCategoricalEncoder(encoding_method='woe')
    #     encoder.fit(df[['var_A', 'var_B']], df['target'])
    # # test case 11: when the denominator probability is zero, woe
    # with pytest.raises(ValueError):
    #     df = {'var_A': ['A'] * 6 + ['B'] * 10 + ['C'] * 4,
    #           'var_B': ['A'] * 10 + ['B'] * 6 + ['C'] * 4,
    #           'target': [1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0]}
    #     df = pd.DataFrame(df)
    #     encoder = WoERatioCategoricalEncoder(encoding_method='woe')
    #     encoder.fit(df[['var_A', 'var_B']], df['target'])
    # test case 12: non fitted error
    with pytest.raises(NotFittedError):
        imputer = WoERatioCategoricalEncoder()
        imputer.transform(dataframe_enc)
    # test case 13: when dataset contains na, fit method
    with pytest.raises(ValueError):
        encoder = WoERatioCategoricalEncoder(encoding_method='woe')
        encoder.fit(dataframe_enc_na[['var_A', 'var_B']], dataframe_enc_na['target'])
    # test case 14: when dataset contains na, transform method
    with pytest.raises(ValueError):
        encoder = WoERatioCategoricalEncoder(encoding_method='woe')
        encoder.fit(dataframe_enc[['var_A', 'var_B']], dataframe_enc['target'])
        encoder.transform(dataframe_enc_na)
def test_OneHotCategoricalEncoder(dataframe_enc_big, dataframe_enc_big_na):
    """One-hot encoder: k / k-1 dummies, top categories, and error paths.

    The dataframe_enc_big* arguments are supplied by the test harness
    (presumably pytest fixtures from a conftest.py — not visible here).
    """
    # test case 1: encode all categories into k binary variables, select variables automatically
    encoder = OneHotCategoricalEncoder(top_categories=None, variables=None, drop_last=False)
    X = encoder.fit_transform(dataframe_enc_big)
    # init params
    assert encoder.top_categories is None
    assert encoder.variables == ['var_A', 'var_B', 'var_C']
    assert encoder.drop_last == False
    # fit params: expected column sums (== category counts) for every dummy
    transf = {'var_A_A': 6, 'var_A_B': 10, 'var_A_C': 4, 'var_A_D': 10, 'var_A_E': 2, 'var_A_F': 2, 'var_A_G': 6,
              'var_B_A': 10, 'var_B_B': 6, 'var_B_C': 4, 'var_B_D': 10, 'var_B_E': 2, 'var_B_F': 2, 'var_B_G': 6,
              'var_C_A': 4, 'var_C_B': 6, 'var_C_C': 10, 'var_C_D': 10, 'var_C_E': 2, 'var_C_F': 2, 'var_C_G': 6}
    assert encoder.input_shape_ == (40, 3)
    # transform params: originals replaced by their dummies
    assert X.sum().to_dict() == transf
    assert 'var_A' not in X.columns
    # test case 2: encode all categories into k-1 binary variables, pass list of variables
    encoder = OneHotCategoricalEncoder(top_categories=None, variables=['var_A', 'var_B'], drop_last=True)
    X = encoder.fit_transform(dataframe_enc_big)
    # init params
    assert encoder.top_categories is None
    assert encoder.variables == ['var_A', 'var_B']
    assert encoder.drop_last == True
    # fit params: last category (G) dropped from each encoded variable
    transf = {'var_A_A': 6, 'var_A_B': 10, 'var_A_C': 4, 'var_A_D': 10, 'var_A_E': 2, 'var_A_F': 2,
              'var_B_A': 10, 'var_B_B': 6, 'var_B_C': 4, 'var_B_D': 10, 'var_B_E': 2, 'var_B_F': 2}
    assert encoder.input_shape_ == (40, 3)
    # transform params
    for col in transf.keys():
        assert X[col].sum() == transf[col]
    assert 'var_B' not in X.columns
    assert 'var_B_G' not in X.columns
    # var_C not selected, so it stays as-is
    assert 'var_C' in X.columns
    # test case 3: encode only the most popular categories
    encoder = OneHotCategoricalEncoder(top_categories=4, variables=None, drop_last=False)
    X = encoder.fit_transform(dataframe_enc_big)
    # init params
    assert encoder.top_categories == 4
    # fit params: only the 4 most frequent categories per variable get a dummy
    transf = {'var_A_D': 10, 'var_A_B': 10, 'var_A_A': 6, 'var_A_G': 6,
              'var_B_A': 10, 'var_B_D': 10, 'var_B_G': 6, 'var_B_B': 6,
              'var_C_D': 10, 'var_C_C': 10, 'var_C_G': 6, 'var_C_B': 6}
    assert encoder.input_shape_ == (40, 3)
    # transform params
    for col in transf.keys():
        assert X[col].sum() == transf[col]
    assert 'var_B' not in X.columns
    assert 'var_B_F' not in X.columns
    # invalid init parameters must be rejected at construction time
    with pytest.raises(ValueError):
        OneHotCategoricalEncoder(top_categories=0.5)
    with pytest.raises(ValueError):
        OneHotCategoricalEncoder(drop_last=0.5)
    # test case 4: when dataset contains na, fit method
    with pytest.raises(ValueError):
        encoder = OneHotCategoricalEncoder()
        encoder.fit(dataframe_enc_big_na)
    # test case 5: when dataset contains na, transform method
    with pytest.raises(ValueError):
        encoder = OneHotCategoricalEncoder()
        encoder.fit(dataframe_enc_big)
        encoder.transform(dataframe_enc_big_na)
def test_RareLabelEncoder(dataframe_enc_big, dataframe_enc_big_na):
    """Rare-label encoder: default params, custom grouping label and error paths.

    The dataframe_enc_big* arguments are supplied by the test harness
    (presumably pytest fixtures from a conftest.py — not visible here).
    """
    # test case 1: defo params, automatically select variables
    encoder = RareLabelCategoricalEncoder(tol=0.06, n_categories=5, variables=None, replace_with='Rare')
    X = encoder.fit_transform(dataframe_enc_big)
    # expected: categories below the 6% tolerance (E and F, 2/40 each) grouped as 'Rare'
    df = {'var_A': ['A'] * 6 + ['B'] * 10 + ['C'] * 4 + ['D'] * 10 + ['Rare'] * 4 + ['G'] * 6,
          'var_B': ['A'] * 10 + ['B'] * 6 + ['C'] * 4 + ['D'] * 10 + ['Rare'] * 4 + ['G'] * 6,
          'var_C': ['A'] * 4 + ['B'] * 6 + ['C'] * 10 + ['D'] * 10 + ['Rare'] * 4 + ['G'] * 6, }
    df = pd.DataFrame(df)
    # init params
    assert encoder.tol == 0.06
    assert encoder.n_categories == 5
    assert encoder.replace_with == 'Rare'
    assert encoder.variables == ['var_A', 'var_B', 'var_C']
    # fit params
    assert encoder.input_shape_ == (40, 3)
    # transform params
    pd.testing.assert_frame_equal(X, df)
    # test case 2: user provides alternative grouping value and variable list
    encoder = RareLabelCategoricalEncoder(tol=0.15, n_categories=5, variables=['var_A', 'var_B'], replace_with='Other')
    X = encoder.fit_transform(dataframe_enc_big)
    # expected: var_C untouched (not in the variable list)
    df = {'var_A': ['A'] * 6 + ['B'] * 10 + ['Other'] * 4 + ['D'] * 10 + ['Other'] * 4 + ['G'] * 6,
          'var_B': ['A'] * 10 + ['B'] * 6 + ['Other'] * 4 + ['D'] * 10 + ['Other'] * 4 + ['G'] * 6,
          'var_C': ['A'] * 4 + ['B'] * 6 + ['C'] * 10 + ['D'] * 10 + ['E'] * 2 + ['F'] * 2 + ['G'] * 6}
    df = pd.DataFrame(df)
    # init params
    assert encoder.tol == 0.15
    assert encoder.n_categories == 5
    assert encoder.replace_with == 'Other'
    assert encoder.variables == ['var_A', 'var_B']
    # fit params
    assert encoder.input_shape_ == (40, 3)
    # transform params
    pd.testing.assert_frame_equal(X, df)
    # invalid init parameters must be rejected at construction time
    with pytest.raises(ValueError):
        encoder = RareLabelCategoricalEncoder(tol=5)
    with pytest.raises(ValueError):
        encoder = RareLabelCategoricalEncoder(n_categories=0.5)
    with pytest.raises(ValueError):
        encoder = RareLabelCategoricalEncoder(replace_with=0.5)
    # test case 3: when the variable has low cardinality
    with pytest.warns(UserWarning):
        encoder = RareLabelCategoricalEncoder(n_categories=10)
        encoder.fit(dataframe_enc_big)
    # test case 4: when dataset contains na, fit method
    with pytest.raises(ValueError):
        encoder = RareLabelCategoricalEncoder(n_categories=4)
        encoder.fit(dataframe_enc_big_na)
    # test case 5: when dataset contains na, transform method
    with pytest.raises(ValueError):
        encoder = RareLabelCategoricalEncoder(n_categories=4)
        encoder.fit(dataframe_enc_big)
        encoder.transform(dataframe_enc_big_na)
    # test case 6: user provides the maximum number of categories they want
    rare_encoder = RareLabelCategoricalEncoder(tol=0.10, max_n_categories=4,
                                               n_categories=5)
    X = rare_encoder.fit_transform(dataframe_enc_big)
    df = {'var_A': ['A'] * 6 + ['B'] * 10 + ['Rare'] * 4 + ['D'] * 10 + ['Rare'] * 4 + ['G'] * 6,
          'var_B': ['A'] * 10 + ['B'] * 6 + ['Rare'] * 4 + ['D'] * 10 + ['Rare'] * 4 + ['G'] * 6,
          'var_C': ['Rare'] * 4 + ['B'] * 6 + ['C'] * 10 + ['D'] * 10 + ['Rare'] * 4 + ['G'] * 6, }
    df = pd.DataFrame(df)
    pd.testing.assert_frame_equal(X, df)
| 47.25239
| 124
| 0.631287
| 3,286
| 24,713
| 4.574559
| 0.053561
| 0.025546
| 0.01397
| 0.014103
| 0.861961
| 0.819585
| 0.754524
| 0.73736
| 0.71707
| 0.693986
| 0
| 0.139459
| 0.222676
| 24,713
| 522
| 125
| 47.342912
| 0.64305
| 0.162708
| 0
| 0.626543
| 0
| 0
| 0.073914
| 0
| 0
| 0
| 0
| 0
| 0.231481
| 1
| 0.018519
| false
| 0
| 0.027778
| 0
| 0.046296
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dbfeae23426f9b3edc5c70327f624bbd167142b9
| 173
|
py
|
Python
|
codigo.py
|
A01653108/HCAP2021
|
fcf306f4e54c8c27613346b10ef5cb436190c4a6
|
[
"MIT"
] | null | null | null |
codigo.py
|
A01653108/HCAP2021
|
fcf306f4e54c8c27613346b10ef5cb436190c4a6
|
[
"MIT"
] | null | null | null |
codigo.py
|
A01653108/HCAP2021
|
fcf306f4e54c8c27613346b10ef5cb436190c4a6
|
[
"MIT"
] | null | null | null |
# BUG FIX: this file contained unresolved git merge conflict markers
# (<<<<<<< HEAD / ======= / >>>>>>>), which are a syntax error. Both branches
# each added one greeting print; both lines are kept so neither change is lost.
import math

print(math.pi)
print("HOla esta es una linea nueva")
print("hola esta es una nueva linea")
| 21.625
| 48
| 0.699422
| 21
| 173
| 5.761905
| 0.571429
| 0.14876
| 0.214876
| 0.247934
| 0.297521
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150327
| 0.115607
| 173
| 7
| 49
| 24.714286
| 0.640523
| 0
| 0
| 0
| 0
| 0
| 0.323699
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.142857
| null | null | 0.428571
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
e01e1cf1e4d48501f9ad558d84fcfba441b4c6ce
| 10,436
|
py
|
Python
|
research/scraper.py
|
rahulmkumar/ZeroSim1
|
63eb357c4831b666581df0de4355f85095653d06
|
[
"MIT"
] | 1
|
2020-08-28T16:27:15.000Z
|
2020-08-28T16:27:15.000Z
|
research/scraper.py
|
rahulmkumar/ZeroSim1
|
63eb357c4831b666581df0de4355f85095653d06
|
[
"MIT"
] | null | null | null |
research/scraper.py
|
rahulmkumar/ZeroSim1
|
63eb357c4831b666581df0de4355f85095653d06
|
[
"MIT"
] | 1
|
2021-02-03T12:31:42.000Z
|
2021-02-03T12:31:42.000Z
|
import requests
from bs4 import BeautifulSoup
import pandas as pd
import time
import random
# Finviz screener URLs: `header_url` points at the first results page (used
# below to read the table's column headers); `data_url` is the base for the
# paginated data requests, completed with a row offset.
header_url = "http://www.finviz.com/screener.ashx?v=111&r=1"
data_url = "http://www.finviz.com/screener.ashx?v=111&r="
# Pagination: each page shows `sym_per_page` symbols; `pages` holds the row
# offsets passed to the `r=` query parameter.
url_start = 1
url_end = 7141
sym_per_page = 20
pages = range(url_start,url_end,sym_per_page)
def scrape_page(url):
    """Download *url* and return its content parsed as a BeautifulSoup tree."""
    response = requests.get(url)
    parsed = BeautifulSoup(response.content)
    return parsed
# NOTE(review): Python 2 code (print statements, long(), DataFrame.ix) — runs
# only under a Python 2 interpreter with an old pandas.
soup = scrape_page(header_url)
#header = soup.find_all("tr",{"align" :"center"})
# This gets the header items
# information columns will store: Ticker, Company, Sector, Industry and Country
info_columns = []
# Data columns will store: Ticker, Market Cap, P/E, Price, Change and Volume
data_columns = []
#find total number of stocks
total_stocks = int(str(soup.find_all("td",{"class" : "count-text"})[0].contents[1]).split(' ')[0])
index = range(0,total_stocks)
# Header cells 1-5 are the info columns; cell 0 (the row number) is skipped.
#info_columns.append(soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"style" : "cursor:pointer;"})[0].text)
info_columns.append(soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"style" : "cursor:pointer;"})[1].text)
info_columns.append(soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"style" : "cursor:pointer;"})[2].text)
info_columns.append(soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"style" : "cursor:pointer;"})[3].text)
info_columns.append(soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"style" : "cursor:pointer;"})[4].text)
info_columns.append(soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"style" : "cursor:pointer;"})[5].text)
#print info_columns
# Header cells 6, 8, 9, 10 are the data columns; cells 7 is skipped (see the
# commented lines — presumably a column the author chose to drop; confirm).
#data_columns.append(soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"style" : "cursor:pointer;"})[1].text)
data_columns.append(soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"style" : "cursor:pointer;"})[6].text)
#data_columns.append(soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"style" : "cursor:pointer;"})[7].text)
data_columns.append(soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"style" : "cursor:pointer;"})[8].text)
data_columns.append(soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"style" : "cursor:pointer;"})[9].text)
data_columns.append(soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"style" : "cursor:pointer;"})[10].text)
#print data_columns
# first row returns the No. This can become a temporary index in a dataframe
#Ignore the No.
#print soup.find_all("td",{"align":"right","class":"body-table-nw"})[0].contents[0]
# create dataframes
df_info = pd.DataFrame(index = index, columns = info_columns)
df_data = pd.DataFrame(index = index, columns = data_columns)
# Per-page cell offsets: info cells come in groups of 5, data cells in groups
# of 6 — assumes the 20-rows-per-page layout; TODO confirm against the markup.
sym_info_count = range(0,100,5)
sym_data_count = range(0,115,6)
# NOTE(review): only the first 3 pages are fetched here (pages[0:3]) —
# presumably a debugging limit; remove the slice to scrape everything.
for page in pages[0:3]:
    fetch_url = data_url + str(page)
    print fetch_url
    #fetch_url = data_url + str(21)
    soup = scrape_page(fetch_url)
    snum = 0
    for i in sym_info_count:
        try:
            # The right-aligned "No." cell gives the absolute row index (1-based).
            info_index = int(soup.find_all("td",{"align":"right","class":"body-table-nw"})[snum].contents[0])-1
            #print 'num:'+str(snum)
            #print 'info_index:'+str(info_index)
            df_info[info_columns[0]].ix[info_index] = soup.find_all("td",{"align":"left","class":"body-table-nw"})[i].contents[0].contents[0]
            df_info[info_columns[1]].ix[info_index] = soup.find_all("td",{"align":"left","class":"body-table-nw"})[i+1].contents[0]
            df_info[info_columns[2]].ix[info_index] = soup.find_all("td",{"align":"left","class":"body-table-nw"})[i+2].contents[0]
            df_info[info_columns[3]].ix[info_index] = soup.find_all("td",{"align":"left","class":"body-table-nw"})[i+3].contents[0]
            df_info[info_columns[4]].ix[info_index] = soup.find_all("td",{"align":"left","class":"body-table-nw"})[i+4].contents[0]
        except:
            print 'Issue with Info count for loop'
            pass
        snum +=6
    for j in sym_data_count:
        try:
            data_index = int(soup.find_all("td",{"align":"right","class":"body-table-nw"})[j].contents[0])-1
            #print 'j:'+str(j)
            #print 'data_index:'+str(data_index)
            #print data_index
            # Market cap: normalize to millions — 'B' suffix scaled by 1000,
            # '-' treated as 0, otherwise the 'M' suffix is stripped.
            if str(soup.find_all("td",{"align":"right","class":"body-table-nw"})[j+1].contents[0]).endswith("B"):
                df_data[data_columns[0]].ix[data_index] = float(str(soup.find_all("td",{"align":"right","class":"body-table-nw"})[j+1].contents[0]).replace('B',''))*1000
            elif soup.find_all("td",{"align":"right","class":"body-table-nw"})[j+1].contents[0] == '-':
                df_data[data_columns[0]].ix[data_index] = 0
            else:
                df_data[data_columns[0]].ix[data_index] = str(soup.find_all("td",{"align":"right","class":"body-table-nw"})[j+1].contents[0]).replace('M','')
            #df_data[data_columns[1]].ix[data_index] = soup.find_all("td",{"align":"right","class":"body-table-nw"})[j+2].contents[0]
            df_data[data_columns[1]].ix[data_index] = soup.find_all("td",{"align":"right","class":"body-table-nw"})[j+3].contents[0].contents[0]
            # Change column: strip the '%' sign; Volume: strip thousands separators.
            df_data[data_columns[2]].ix[data_index] = float(str(soup.find_all("td",{"align":"right","class":"body-table-nw"})[j+4].contents[0].contents[0]).replace('%',''))
            df_data[data_columns[3]].ix[data_index] = long(str(soup.find_all("td",{"align":"right","class":"body-table-nw"})[j+5].contents[0]).replace(',',''))
        except:
            pass
    # wait for a random amount of time between 5 and 60 seconds. Overall average wait will be 30 seconds per page.
    wait_seconds = random.randint(5,60)
    time.sleep(wait_seconds)
    print 'waiting for:' + str(wait_seconds)
df_info.to_csv('df_info.csv')
df_data.to_csv('df_data.csv')
'''
# Alternate way of getting header
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[1].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[3].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[5].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[7].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[9].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[11].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[13].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[15].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[17].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[19].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[21].contents[0]
print soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"align" : "right"})[0].text
print soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"align" : "left"})[0].text
print soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"align" : "left"})[1].text
print soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"align" : "left"})[2].text
print soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"align" : "left"})[3].text
print soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"align" : "left"})[4].text
print soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"align" : "right"})[1].text
print soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"align" : "right"})[2].text
print soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"align" : "right"})[3].text
print soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"align" : "right"})[4].text
print soup.find_all("tr",{"align" : "center"})[0].find_all("td",{"align" : "right"})[5].text
print soup.find_all("tr",{"align" : "center"})[0].contents[1].contents[0]
print soup.find_all("tr",{"align" : "center"})[0].contents[3].contents[0].contents[0]
print soup.find_all("tr",{"align" : "center"})[0].contents[5].contents[0]
print soup.find_all("tr",{"align" : "center"})[0].contents[7].contents[0]
# Alternate way of getting details
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[23].contents[1].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[23].contents[2].contents[0].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[23].contents[3].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[23].contents[4].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[23].contents[5].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[23].contents[6].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[23].contents[7].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[23].contents[8].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[23].contents[9].contents[0].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[23].contents[10].contents[0].contents[0]
print soup.contents[2].contents[3].contents[29].contents[1].contents[1].contents[1].contents[23].contents[11].contents[0]
# Alternate way of getting details
print soup.find_all("td",{"align":"right"})[3].contents[0]
print soup.find_all("td",{"align":"left"})[8].contents[0].contents[0]
print soup.find_all("td",{"align":"left"})[9].contents[0]
print soup.find_all("td",{"align":"left"})[10].contents[0]
print soup.find_all("td",{"align":"left"})[11].contents[0]
print soup.find_all("td",{"align":"left"})[12].contents[0]
print soup.find_all("td",{"align":"right"})[4].contents[0]
print soup.find_all("td",{"align":"right"})[5].contents[0]
print soup.find_all("td",{"align":"right"})[6].contents[0].contents[0]
print soup.find_all("td",{"align":"right"})[7].contents[0].contents[0]
print soup.find_all("td",{"align":"right"})[8].contents[0]
'''
| 53.793814
| 172
| 0.668264
| 1,664
| 10,436
| 4.082332
| 0.098558
| 0.081407
| 0.172678
| 0.11924
| 0.791697
| 0.771824
| 0.74724
| 0.743412
| 0.730899
| 0.674371
| 0
| 0.04363
| 0.092947
| 10,436
| 193
| 173
| 54.072539
| 0.673991
| 0.116424
| 0
| 0.086957
| 0
| 0
| 0.201243
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.028986
| 0.072464
| null | null | 0.043478
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e0254b8056c51ffdbc4f2d5101102359149e4c15
| 69,006
|
py
|
Python
|
ansys/mapdl/core/post.py
|
da1910/pymapdl
|
305b70b30e61a78011e974ff4cb409ee21f89e13
|
[
"MIT"
] | 1
|
2021-07-28T00:42:53.000Z
|
2021-07-28T00:42:53.000Z
|
ansys/mapdl/core/post.py
|
da1910/pymapdl
|
305b70b30e61a78011e974ff4cb409ee21f89e13
|
[
"MIT"
] | null | null | null |
ansys/mapdl/core/post.py
|
da1910/pymapdl
|
305b70b30e61a78011e974ff4cb409ee21f89e13
|
[
"MIT"
] | null | null | null |
"""Post-processing module using MAPDL interface"""
import re
import weakref
from functools import wraps

import numpy as np

from ansys.mapdl.core.plotting import general_plotter
from ansys.mapdl.core.errors import MapdlRuntimeError
from ansys.mapdl.core.misc import supress_logging
# Allowed component strings for the result-retrieval methods below.
COMPONENT_STRESS_TYPE = ['X', 'Y', 'Z', 'XY', 'YZ', 'XZ']  # component stress/strain
PRINCIPAL_TYPE = ['1', '2', '3']  # principal stress/strain directions
STRESS_TYPES = ['X', 'Y', 'Z', 'XY', 'YZ', 'XZ', '1', '2', '3', 'INT', 'EQV']
COMP_TYPE = ['X', 'Y', 'Z', 'SUM']
DISP_TYPE = ['X', 'Y', 'Z', 'NORM', 'ALL']  # displacement components
ROT_TYPE = ['X', 'Y', 'Z', 'ALL']  # rotation components
def check_result_loaded(func):
    """Decorator verifying that a result set has been loaded within MAPDL.

    Wraps *func* and converts any exception it raises into a
    ``MapdlRuntimeError`` explaining that either the result type is
    invalid for this solution or no result set has been loaded.
    """
    @wraps(func)  # preserve the wrapped function's name and docstring
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        # narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # still propagate instead of being re-raised as a runtime error
        except Exception:
            raise MapdlRuntimeError('Either this is an invalid result type for '
                                    'this solution, or '
                                    'no results set has been loaded within MAPDL.\n'
                                    'Load a result set with:\n\n'
                                    '\tmapdl.post1()\n'
                                    '\tmapdl.set(1, 1)') from None
    return wrapper
def check_comp(component, allowed):
    """Validate a component string against an allowed list.

    Parameters
    ----------
    component : str
        Component name; whitespace is stripped and it is upper-cased.
    allowed : list
        Acceptable (upper-case) component strings.

    Returns
    -------
    str
        The normalized component.

    Raises
    ------
    TypeError
        When *component* is not a string.
    ValueError
        When the normalized component is not in *allowed*.
    """
    if not isinstance(component, str):
        raise TypeError('Component must be a string')
    normalized = component.strip().upper()
    if normalized in allowed:
        return normalized
    raise ValueError('Component %s not a valid type. ' % normalized +
                     'Allowed items:\n%s' % str(allowed))
class PostProcessing():
"""Post-processing using an active MAPDL session"""
def __init__(self, mapdl):
    """Initialize postprocessing instance.

    Parameters
    ----------
    mapdl : _MapdlCore
        Active MAPDL instance.  Held via a weak reference so this
        object does not keep the MAPDL session alive on its own.
    """
    # local import avoids a circular import with the mapdl module
    from ansys.mapdl.core.mapdl import _MapdlCore
    if not isinstance(mapdl, _MapdlCore):  # pragma: no cover
        raise TypeError('Must be initialized using Mapdl instance')
    self._mapdl_weakref = weakref.ref(mapdl)
    # whether a result set has been loaded; cleared by _reset_cache()
    self._set_loaded = False
@property
def _mapdl(self):
    """Return the weakly referenced instance of MAPDL.

    May return ``None`` once the referenced MAPDL instance has been
    garbage collected.
    """
    return self._mapdl_weakref()
@property
def _log(self):
    """Alias for the MAPDL instance's logger."""
    return self._mapdl._log
def _set_log_level(self, level):
    """Alias for ``mapdl._set_log_level``; forwards *level* unchanged."""
    return self._mapdl._set_log_level(level)
@supress_logging
def __repr__(self):
    """Summarize the active result file; in POST1, also list result sets."""
    header = [
        'PyMAPDL PostProcessing Instance\n',
        '\tActive Result File: %s\n' % self.filename,
        '\tNumber of result sets: %d\n' % self.nsets,
        '\tCurrent load step: %d\n' % self.load_step,
        '\tCurrent sub step: %d\n' % self.sub_step,
    ]
    info = ''.join(header)
    # a result-set table is only available from within the POST1 routine
    if self._mapdl.parameters.routine == 'POST1':
        info += '\n\n' + self._mapdl.set('LIST')
    else:
        info += '\n\n Enable routine POST1 to see a table of available results'
    return info
@property
def time_values(self):
    """Return an array of the time values for all result sets.

    Examples
    --------
    Get all the time values after loading POST1.

    >>> mapdl.post1()
    >>> mapdl.post_processing.time_values
    [75.00054133588232,
     75.00081189985094,
     75.00121680412036,
     75.00574491751847,
     75.03939292229019,
     75.20949687626468]
    """
    listing = self._mapdl.set('LIST')
    numbers = re.findall(r'([-+]?\d*\.\d+|\d+)', listing)
    # each record contributes five numbers; the time value is the second
    return np.array([float(value) for value in numbers[1::5]])
def _reset_cache(self):
    """Reset local cache (marks no result set as loaded)."""
    self._set_loaded = False
@property
def filename(self) -> str:
    """Return the current result file name without extension.

    Examples
    --------
    >>> mapdl.post_processing.filename
    'file'
    """
    # /INQUIRE responds with "... = <name>"; keep the final field
    response = self._mapdl.run('/INQUIRE, param, RSTFILE', mute=False)
    name = response.split('=')[-1]
    return name.strip()
@property
def nsets(self) -> int:
    """Number of data sets on the result file.

    Examples
    --------
    >>> mapdl.post_processing.nsets
    1
    """
    value = self._mapdl.get_value("ACTIVE", item1="SET", it1num='NSET')
    return int(value)
@property
def load_step(self) -> int:
    """Current load step number.

    Examples
    --------
    >>> mapdl.post1()
    >>> mapdl.set(2, 2)
    >>> mapdl.post_processing.load_step
    2
    """
    value = self._mapdl.get_value("ACTIVE", item1="SET", it1num='LSTP')
    return int(value)
@property
def sub_step(self) -> int:
    """Current sub step number.

    Examples
    --------
    >>> mapdl.post1()
    >>> mapdl.set(2, 2)
    >>> mapdl.post_processing.sub_step
    2
    """
    value = self._mapdl.get_value("ACTIVE", item1="SET", it1num='SBST')
    return int(value)
@property
def time(self) -> float:
    """Time associated with the current result in the database.

    Examples
    --------
    Time of the current result of a modal analysis.

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.time
    1.0
    """
    value = self._mapdl.get_value("ACTIVE", item1="SET", it1num='TIME')
    return value
@property
def freq(self) -> float:
    """Frequency associated with the current result in the database.

    Applicable for a modal, harmonic, or spectral analysis.

    Examples
    --------
    Natural frequency of the current result of a modal analysis.

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.freq
    956.86239847
    """
    value = self._mapdl.get_value("ACTIVE", item1="SET", it1num='FREQ')
    return value
def nodal_displacement(self, component='NORM') -> np.ndarray:
    """Nodal X, Y, or Z structural displacement.

    Equivalent MAPDL command: ``PRNSOL, U, X``

    Parameters
    ----------
    component : str, optional
        Structural displacement component to retrieve.  One of
        ``'X'``, ``'Y'``, ``'Z'``, ``'ALL'``, or ``'NORM'``
        (default).

    Examples
    --------
    >>> mapdl.post_processing.nodal_displacement('X')
    array([1.07512979e-04, 8.59137773e-05, 5.70690047e-05, ...,
           5.70333124e-05, 8.58600402e-05, 1.07445726e-04])

    Displacement in all dimensions.

    >>> mapdl.post_processing.nodal_displacement('ALL')
    array([[ 1.07512979e-04, 6.05382076e-05, -1.64333622e-11],
           [ 8.59137773e-05, 7.88053970e-05, -1.93668243e-11],
           ...,
           [ 1.07445726e-04, 6.05003408e-05, -1.23634647e-11]])

    Notes
    -----
    Displacements are always returned for all nodes regardless of
    whether the nodes are selected.
    """
    component = check_comp(component, DISP_TYPE)
    if component not in ('NORM', 'ALL'):
        return self._ndof_rst('U', component)
    # stack the per-axis results into a (3, n) array
    axes = [self._ndof_rst('U', axis) for axis in ('X', 'Y', 'Z')]
    stacked = np.vstack(axes)
    if component == 'NORM':
        return np.linalg.norm(stacked, axis=0)
    return stacked.T
def plot_nodal_displacement(self, component='NORM', show_node_numbering=False,
                            **kwargs):
    """Plot nodal displacement.

    Parameters
    ----------
    component : str, optional
        Structural displacement component: ``'X'``, ``'Y'``,
        ``'Z'``, or ``'NORM'`` (default).  ``'ALL'`` is not valid
        in this context.
    show_node_numbering : bool, optional
        Label each plotted node with its node number.

    Returns
    -------
    cpos : list
        Camera position from the plotter; may be reused as an input
        parameter for future plots.

    Examples
    --------
    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_displacement('X',
                                                      show_node_numbering=True)
    """
    # a single scalar per node is required for plotting
    if isinstance(component, str) and component.upper() == 'ALL':
        raise ValueError('"ALL" not allowed in this context. Select a '
                         'single displacement component (e.g. "X")')
    scalars = self.nodal_displacement(component)
    kwargs.setdefault('stitle', '%s Displacement' % component)
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
def _plot_point_scalars(self, scalars, show_node_numbering=False, **kwargs):
    """Plot point scalars on the active surface mesh.

    Assumes scalars are from all nodes and not just the active surface.

    Parameters
    ----------
    scalars : np.ndarray
        One value per node for all nodes.
    show_node_numbering : bool, optional
        Label each surface point with its ANSYS node number.
    **kwargs : dict
        Forwarded to ``general_plotter``; ``stitle`` is consumed here
        as the scalar-bar title.
    """
    surf = self._mapdl.mesh._surf
    # as ``disp`` returns the result for all nodes, we need all node numbers
    # and to index to the output node numbers
    if hasattr(self._mapdl.mesh, 'nnum_all'):
        nnum = self._mapdl.mesh.nnum_all
    else:
        nnum = self._all_nnum
    # mask: which of all nodes appear on the surface;
    # ridx: permutation restoring the surface's own node ordering
    mask = np.in1d(nnum, surf['ansys_node_num'])
    ridx = np.argsort(np.argsort(surf['ansys_node_num']))
    # NOTE(review): when sizes disagree, scalars presumably cover the
    # selected nodes only -- confirm against callers
    if scalars.size != mask.size:
        scalars = scalars[self.selected_nodes]
    scalars = scalars[mask][ridx]
    meshes = [{'mesh': surf.copy(deep=False),  # deep=False for ipyvtk-simple
               'scalar_bar_args': {'title': kwargs.pop('stitle', '')},
               'scalars': scalars}]
    labels = []
    if show_node_numbering:
        labels = [{'points': surf.points, 'labels': surf['ansys_node_num']}]
    # default window title is used for all result plots, not only displacement
    kwargs.setdefault('title', 'MAPDL Displacement')
    return general_plotter(meshes, [], labels, **kwargs)
@property
@supress_logging
def _all_nnum(self):
    """Return all node numbers while preserving the current selection.

    Saves the current node selection to a temporary component,
    selects everything, queries the full node list, then restores
    the saved selection.
    """
    self._mapdl.cm('__TMP_NODE__', 'NODE')
    self._mapdl.allsel()
    nnum = self._mapdl.get_array('NODE', item1='NLIST').astype(np.int32)
    # NOTE(review): a -1 first entry apparently flags a failed retrieval;
    # the query is retried once -- confirm against MAPDL behavior
    if nnum[0] == -1:
        nnum = self._mapdl.get_array('NODE', item1='NLIST').astype(np.int32)
    self._mapdl.cmsel('S', '__TMP_NODE__', 'NODE')
    return nnum
@property
def _nsel(self):
    """Return the ANSYS-formatted node selection status array.

    One ``int8`` entry per node:
    -1 for unselected
    0 for undefined
    1 for selected
    """
    return self._ndof_rst('NSEL').astype(np.int8)
@property
def selected_nodes(self) -> np.ndarray:
    """Boolean mask of the currently selected nodes.

    Examples
    --------
    >>> mapdl.post_processing.selected_nodes
    array([False, False, False, ..., True, True, True])
    """
    status = self._nsel
    return status == 1
def nodal_rotation(self, component='ALL') -> np.ndarray:
    """Nodal X, Y, or Z structural rotation.

    Equivalent MAPDL commands:
    ``PRNSOL, ROT, X``
    ``PRNSOL, ROT, Y``
    ``PRNSOL, ROT, Z``

    Parameters
    ----------
    component : str, optional
        Structural rotational component to retrieve: ``'X'``,
        ``'Y'``, ``'Z'``, or ``'ALL'`` (default).

    Examples
    --------
    Nodal rotation in all dimensions for the current result.

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.nodal_rotation('ALL')
    array([[0., 0., 0.],
           [0., 0., 0.],
           ...,
           [0., 0., 0.]])

    Notes
    -----
    Rotations are always returned for all nodes regardless of
    whether the nodes are selected.  Use the ``selected_nodes``
    mask to get the currently selected nodes.
    """
    component = check_comp(component, ROT_TYPE)
    if component != 'ALL':
        return self._ndof_rst('ROT', component)
    # stack the per-axis rotations into an (n, 3) array
    axes = [self._ndof_rst('ROT', axis) for axis in ('X', 'Y', 'Z')]
    return np.vstack(axes).T
def plot_nodal_rotation(self, component, show_node_numbering=False,
                        **kwargs):
    """Plot nodal rotation.

    Parameters
    ----------
    component : str
        Structural rotation component: ``'X'``, ``'Y'``, or
        ``'Z'``.  ``'ALL'`` is not valid in this context.
    show_node_numbering : bool, optional
        Label each plotted node with its node number.

    Returns
    -------
    cpos : list
        Camera position from the plotter; may be reused as an input
        parameter for future plots.

    Examples
    --------
    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_rotation('X', show_node_numbering=True)
    """
    # a single scalar per node is required for plotting
    if isinstance(component, str) and component.upper() == 'ALL':
        raise ValueError('"ALL" not allowed in this context. Select a '
                         'single component (e.g. "X")')
    scalars = self.nodal_rotation(component)
    kwargs.setdefault('stitle', f'{component} Rotation')
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
@check_result_loaded
def _ndof_rst(self, item, it1num=''):
    """Return a nodal degree-of-freedom result array.

    The decorator converts any failure into a ``MapdlRuntimeError``
    telling the user to load a result set first.
    """
    return self._mapdl.get_array('NODE', item1=item, it1num=it1num)
@property
def nodal_temperature(self) -> np.ndarray:
    """Nodal temperature of the current result.

    Equivalent MAPDL command: ``PRNSOL, TEMP``

    Examples
    --------
    >>> mapdl.post_processing.nodal_temperature
    array([0., 0., 0., ..., 0., 0., 0.])

    Notes
    -----
    Nodal results are averaged across all selected elements; nodes
    without valid results (e.g. midside nodes) report zero.  If all
    elements attached to a node are unselected, the node reports a
    zero value.
    """
    return self._ndof_rst('TEMP')
def plot_nodal_temperature(self, show_node_numbering=False, **kwargs):
    """Plot nodal temperature of the current result.

    Parameters
    ----------
    show_node_numbering : bool, optional
        Label each plotted node with its node number.

    Returns
    -------
    cpos : list
        Camera position from the plotter; may be reused as an input
        parameter for future plots.

    Examples
    --------
    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_temperature(off_screen=True,
                                                     savefig='temp_1_2.png')

    Subselect a single element type and plot those results.

    >>> mapdl.esel('S', 'TYPE', vmin=1)
    >>> mapdl.post_processing.plot_nodal_temperature(smooth_shading=True)
    """
    kwargs.setdefault('stitle', 'Nodal\nTemperature')
    scalars = self.nodal_temperature
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
@property
def nodal_pressure(self) -> np.ndarray:
    """Nodal pressure of the current result.

    Equivalent MAPDL command: ``PRNSOL, PRES``

    Examples
    --------
    >>> mapdl.post_processing.nodal_pressure
    array([0., 0., 0., ..., 0., 0., 0.])

    Notes
    -----
    Nodal results are averaged across all selected elements; nodes
    without valid results (e.g. midside nodes) report zero.  If all
    elements attached to a node are unselected, the node reports a
    zero value.
    """
    return self._ndof_rst('PRES')
def plot_nodal_pressure(self, show_node_numbering=False, **kwargs):
    """Plot nodal pressure of the current result.

    Parameters
    ----------
    show_node_numbering : bool, optional
        Label each plotted node with its node number.

    Returns
    -------
    cpos : list
        Camera position from the plotter; may be reused as an input
        parameter for future plots.

    Examples
    --------
    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_pressure()

    Plot off screen and save a screenshot.

    >>> mapdl.post_processing.plot_nodal_pressure(off_screen=True,
                                                  savefig='temp_1_2.png')
    """
    kwargs.setdefault('stitle', 'Nodal\nPressure')
    scalars = self.nodal_pressure
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
@property
def nodal_voltage(self) -> np.ndarray:
    """Nodal voltage of the current result.

    Equivalent MAPDL command: ``PRNSOL, VOLT``

    Examples
    --------
    >>> mapdl.post_processing.nodal_voltage
    array([0., 0., 0., ..., 0., 0., 0.])

    Notes
    -----
    Nodal results are averaged across all selected elements; nodes
    without valid results (e.g. midside nodes) report zero.  If all
    elements attached to a node are unselected, the node reports a
    zero value.
    """
    return self._ndof_rst('VOLT')
def plot_nodal_voltage(self, show_node_numbering=False, **kwargs):
    """Plot nodal voltage of the current result.

    Parameters
    ----------
    show_node_numbering : bool, optional
        Label each plotted node with its node number.

    Returns
    -------
    cpos : list
        Camera position from the plotter; may be reused as an input
        parameter for future plots.

    Examples
    --------
    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_voltage()

    Plot off screen and save a screenshot.

    >>> mapdl.post_processing.plot_nodal_voltage(off_screen=True,
                                                 savefig='temp_1_2.png')
    """
    kwargs.setdefault('stitle', 'Nodal\nVoltage')
    scalars = self.nodal_voltage
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
def nodal_component_stress(self, component) -> np.ndarray:
    """Nodal component stress.

    Equivalent MAPDL commands:
    ``*VGET, PARM, NODE, , S, X``
    ``PRNSOL, S, COMP``

    Parameters
    ----------
    component : str
        Component stress to retrieve: ``'X'``, ``'Y'``, ``'Z'``,
        ``'XY'``, ``'YZ'``, or ``'XZ'``.

    Examples
    --------
    Nodal stress in the X direction for the first result.

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.nodal_component_stress('X')
    array([0.60024621, 0.61625265, 0.65081825, ...,
           0.        , 0.        , 0.        ])

    Notes
    -----
    Values are always returned for all nodes regardless of whether
    the nodes are selected.  Use the ``selected_nodes`` mask to get
    the currently selected nodes.
    """
    valid = check_comp(component, COMPONENT_STRESS_TYPE)
    return self._ndof_rst('S', valid)
def plot_nodal_component_stress(self, component, show_node_numbering=False,
                                **kwargs):
    """Plot nodal component stress.

    Parameters
    ----------
    component : str
        Component stress to plot: ``'X'``, ``'Y'``, ``'Z'``,
        ``'XY'``, ``'YZ'``, or ``'XZ'``.
    show_node_numbering : bool, optional
        Label each plotted node with its node number.

    Returns
    -------
    cpos : list
        Camera position from the plotter; may be reused as an input
        parameter for future plots.

    Examples
    --------
    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_component_stress('X')
    """
    scalars = self.nodal_component_stress(component)
    kwargs.setdefault('stitle', f'{component} Nodal\nStress')
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
def nodal_principal_stress(self, component) -> np.ndarray:
    """Nodal principal stress.

    Equivalent MAPDL commands:
    ``*VGET, PARM, NODE, , S, 1``
    ``PRNSOL, S, PRIN``

    Parameters
    ----------
    component : str or int
        Principal stress to retrieve: ``'1'``, ``'2'``, or ``'3'``
        (integers are accepted and converted).

    Examples
    --------
    Nodal stress in the S1 direction for the first result.

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.nodal_principal_stress('1')
    array([0.60024621, 0.61625265, 0.65081825, ...,
           0.        , 0.        , 0.        ])

    Notes
    -----
    Values are always returned for all nodes regardless of whether
    the nodes are selected.  Use the ``selected_nodes`` mask to get
    the currently selected nodes.
    """
    # allow an integer component (e.g. 1) as a convenience
    if isinstance(component, int):
        component = str(component)
    valid = check_comp(component, PRINCIPAL_TYPE)
    return self._ndof_rst('S', valid)
def plot_nodal_principal_stress(self, component, show_node_numbering=False,
                                **kwargs):
    """Plot nodal principal stress.

    Parameters
    ----------
    component : str
        Principal stress to plot: ``'1'``, ``'2'``, or ``'3'``.
    show_node_numbering : bool, optional
        Label each plotted node with its node number.

    Returns
    -------
    cpos : list
        Camera position from the plotter; may be reused as an input
        parameter for future plots.

    Examples
    --------
    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_principal_stress('1')
    """
    scalars = self.nodal_principal_stress(component)
    kwargs.setdefault('stitle', f'{component} Nodal\nPrincipal Stress')
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
@property
def nodal_stress_intensity(self) -> np.ndarray:
    """Nodal stress intensity of the current result.

    Equivalent MAPDL command: ``PRNSOL, S, PRIN``

    Examples
    --------
    Stress intensity for result 2.

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.nodal_stress_intensity
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
               0.        ,     0.        ,     0.        ])

    Notes
    -----
    Nodal results are averaged across all selected elements; nodes
    without valid results (e.g. midside nodes) report zero.  If all
    elements attached to a node are unselected, the node reports a
    zero stress value.
    """
    return self._ndof_rst('S', 'INT')
def plot_nodal_stress_intensity(self, show_node_numbering=False, **kwargs):
    """Plot the nodal stress intensity of the current result.

    Parameters
    ----------
    show_node_numbering : bool, optional
        Label each plotted node with its node number.

    Returns
    -------
    cpos : list
        Camera position from the plotter; may be reused as an input
        parameter for future plots.

    Examples
    --------
    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_stress_intensity()

    Plot off screen and save a screenshot.

    >>> mapdl.post_processing.plot_nodal_stress_intensity(off_screen=True,
                                                          savefig='seqv_00.png')
    """
    kwargs.setdefault('stitle', 'Nodal Stress\nIntensity')
    scalars = self.nodal_stress_intensity
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
@property
def nodal_eqv_stress(self) -> np.ndarray:
    """Nodal equivalent (von Mises) stress of the current result.

    Equivalent MAPDL command: ``PRNSOL, S, PRIN``

    Examples
    --------
    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.nodal_eqv_stress
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
               0.        ,     0.        ,     0.        ])

    Notes
    -----
    Nodal results are averaged across all selected elements; nodes
    without valid results (e.g. midside nodes) report zero.  If all
    elements attached to a node are unselected, the node reports a
    zero stress value.
    """
    return self._ndof_rst('S', 'EQV')
def plot_nodal_eqv_stress(self, show_node_numbering=False, **kwargs):
    """Plot nodal equivalent stress of the current result.

    Parameters
    ----------
    show_node_numbering : bool, optional
        Label each plotted node with its node number.

    Returns
    -------
    cpos : list
        Camera position from the plotter; may be reused as an input
        parameter for future plots.

    Examples
    --------
    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_eqv_stress()

    Plot off screen and save a screenshot.

    >>> mapdl.post_processing.plot_nodal_eqv_stress(off_screen=True,
                                                    savefig='seqv_00.png')
    """
    scalars = self.nodal_eqv_stress
    # fixed misspelled user-facing scalar-bar label ("Equilvanent")
    kwargs.setdefault('stitle', 'Nodal Equivalent\nStress')
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
def nodal_total_component_strain(self, component) -> np.ndarray:
    """Total nodal component strain.

    Includes elastic, plastic, and creep strain.

    Equivalent MAPDL command:
    ``*VGET, PARM, NODE, , EPTO, X``

    Parameters
    ----------
    component : str
        Component to retrieve: ``'X'``, ``'Y'``, ``'Z'``, ``'XY'``,
        ``'YZ'``, or ``'XZ'``.

    Examples
    --------
    Total component strain in the X direction for the first result.

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.nodal_total_component_strain('X')
    array([0.60024621, 0.61625265, 0.65081825, ...,
           0.        , 0.        , 0.        ])

    Notes
    -----
    Values are always returned for all nodes regardless of whether
    the nodes are selected.  Use the ``selected_nodes`` mask to get
    the currently selected nodes.
    """
    if isinstance(component, int):
        component = str(component)
    valid = check_comp(component, COMPONENT_STRESS_TYPE)
    return self._ndof_rst('EPTO', valid)
def plot_nodal_total_component_strain(self, component, show_node_numbering=False,
                                      **kwargs):
    """Plot nodal total component strain.

    Includes elastic, plastic, and creep strain.

    Parameters
    ----------
    component : str
        Component to plot: ``'X'``, ``'Y'``, ``'Z'``, ``'XY'``,
        ``'YZ'``, or ``'XZ'``.
    show_node_numbering : bool, optional
        Label each plotted node with its node number.

    Returns
    -------
    cpos : list
        Camera position from the plotter; may be reused as an input
        parameter for future plots.

    Examples
    --------
    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.plot_nodal_total_component_strain('X')
    """
    scalars = self.nodal_total_component_strain(component)
    kwargs.setdefault('stitle', f'{component} Total Nodal\nComponent Strain')
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
def nodal_total_principal_strain(self, component) -> np.ndarray:
    """Total nodal principal strain.

    Includes elastic, plastic, and creep strain.

    Equivalent MAPDL command:
    ``*VGET, PARM, NODE, , EPTO, 1``

    Parameters
    ----------
    component : str or int
        Principal strain to retrieve: ``'1'``, ``'2'``, or ``'3'``
        (integers are accepted and converted).

    Examples
    --------
    Principal nodal strain in the S1 direction for the first result.

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.nodal_total_principal_strain('1')
    array([0.60024621, 0.61625265, 0.65081825, ...,
           0.        , 0.        , 0.        ])

    Notes
    -----
    Values are always returned for all nodes regardless of whether
    the nodes are selected.  Use the ``selected_nodes`` mask to get
    the currently selected nodes.
    """
    if isinstance(component, int):
        component = str(component)
    valid = check_comp(component, PRINCIPAL_TYPE)
    return self._ndof_rst('EPTO', valid)
def plot_nodal_total_principal_strain(self, component,
                                      show_node_numbering=False,
                                      **kwargs):
    """Plot total nodal principal strain.

    Includes elastic, plastic, and creep strain.

    Parameters
    ----------
    component : str
        Nodal principal strain component to plot.  Must be ``'1'``,
        ``'2'``, or ``'3'``.
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the principal nodal strain in the S1 direction for the
    first result.

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.plot_nodal_total_principal_strain('1')
    """
    disp = self.nodal_total_principal_strain(component)
    kwargs.setdefault('stitle', '%s Nodal\nPrincipal Strain' % component)
    return self._plot_point_scalars(disp, show_node_numbering=show_node_numbering,
                                    **kwargs)
@property
def nodal_total_strain_intensity(self) -> np.ndarray:
    """The total nodal strain intensity of the current result.

    Equivalent MAPDL command:

    ``PRNSOL, EPTO, PRIN``

    Examples
    --------
    Total strain intensity for result 2

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.nodal_total_strain_intensity
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
           0.        ,     0.        ,     0.        ])

    Notes
    -----
    The nodal results are averaged across all selected elements.
    Not all nodes will contain valid results (e.g. midside nodes),
    and those nodes will report a zero value.  Elements that are not
    selected will not contribute to the averaged nodal values, and
    if a node's attached elements are all unselected, the node will
    report a zero value.
    """
    # Bug fix: this property documented EPTO (total strain) but
    # queried 'EPEL' (elastic strain only) -- a copy-paste error from
    # the elastic variant.  Query 'EPTO' to match the docstring and
    # the sibling ``nodal_total_eqv_strain`` property.
    return self._ndof_rst('EPTO', 'INT')
def plot_nodal_total_strain_intensity(self,
                                      show_node_numbering=False,
                                      **kwargs):
    """Plot the total nodal strain intensity of the current result.

    Parameters
    ----------
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the total strain intensity for the second result

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_total_strain_intensity()

    Plot off screen and save a screenshot

    >>> mapdl.post_processing.plot_nodal_total_strain_intensity(
    ...     off_screen=True, savefig='seqv_00.png')
    """
    # Default plot title; callers may override via ``stitle``.
    kwargs.setdefault('stitle', 'Total Nodal\nStrain Intensity')
    return self._plot_point_scalars(self.nodal_total_strain_intensity,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
@property
def nodal_total_eqv_strain(self) -> np.ndarray:
    """The total nodal equivalent strain of the current result.

    Equivalent MAPDL command:

    ``PRNSOL, EPTO, PRIN``

    Examples
    --------
    Total equivalent strain for the current result

    >>> mapdl.post_processing.nodal_total_eqv_strain
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
           0.        ,     0.        ,     0.        ])

    Strain from result 2

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.nodal_total_eqv_strain
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
           0.        ,     0.        ,     0.        ])

    Notes
    -----
    The nodal results are averaged across all selected elements.
    Not all nodes will contain valid results (e.g. midside nodes),
    and those nodes will report a zero value.  Elements that are not
    selected will not contribute to the averaged nodal values, and
    if a node's attached elements are all unselected, the node will
    report a zero value.
    """
    return self._ndof_rst('EPTO', 'EQV')
def plot_nodal_total_eqv_strain(self, show_node_numbering=False, **kwargs):
    """Plot the total nodal equivalent strain of the current result.

    Parameters
    ----------
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the total equivalent strain for the second result

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_total_eqv_strain()

    Plot off screen and save a screenshot

    >>> mapdl.post_processing.plot_nodal_total_eqv_strain(
    ...     off_screen=True, savefig='seqv_00.png')
    """
    # Default plot title; callers may override via ``stitle``.
    kwargs.setdefault('stitle', 'Total Nodal\nEquivalent Strain')
    return self._plot_point_scalars(self.nodal_total_eqv_strain,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
###############################################################################
def nodal_elastic_component_strain(self, component) -> np.ndarray:
    """Elastic nodal component strain.

    Equivalent MAPDL command:

    ``PRNSOL, EPEL, PRIN``

    Parameters
    ----------
    component : str
        Component to retrieve.  Must be ``'X'``, ``'Y'``, ``'Z'``,
        ``'XY'``, ``'YZ'``, or ``'XZ'``.

    Returns
    -------
    numpy.ndarray
        Elastic component strain at each node.

    Examples
    --------
    Elastic component strain in the X direction for the first result

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.nodal_elastic_component_strain('X')
    array([0.60024621, 0.61625265, 0.65081825, ...,
           0.        , 0.        , 0.        ])

    Notes
    -----
    Values are returned for all nodes regardless of whether they are
    selected.  Use the ``selected_nodes`` mask to restrict to the
    current selection.
    """
    # Accept ints for convenience, validate the component name, then
    # query the result file.
    comp = str(component) if isinstance(component, int) else component
    comp = check_comp(comp, COMPONENT_STRESS_TYPE)
    return self._ndof_rst('EPEL', comp)
def plot_nodal_elastic_component_strain(self, component, show_node_numbering=False,
                                        **kwargs):
    """Plot nodal elastic component strain.

    Parameters
    ----------
    component : str
        Nodal elastic component to plot.  Must be ``'X'``,
        ``'Y'``, ``'Z'``, ``'XY'``, ``'YZ'``, or ``'XZ'``.
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the nodal elastic component strain in the X direction for
    the second result set.

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_elastic_component_strain('X')
    """
    disp = self.nodal_elastic_component_strain(component)
    kwargs.setdefault('stitle', '%s Elastic Nodal\nComponent Strain' % component)
    return self._plot_point_scalars(disp, show_node_numbering=show_node_numbering,
                                    **kwargs)
def nodal_elastic_principal_strain(self, component) -> np.ndarray:
    """Nodal elastic principal strain.

    Equivalent MAPDL command:

    ``*VGET, PARM, NODE, , EPEL, 1``

    Parameters
    ----------
    component : str
        Principal component to retrieve.  Must be ``'1'``, ``'2'``,
        or ``'3'``.

    Returns
    -------
    numpy.ndarray
        Elastic principal strain at each node.

    Examples
    --------
    Principal nodal strain in the S1 direction for the first result

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.nodal_elastic_principal_strain('1')
    array([0.60024621, 0.61625265, 0.65081825, ...,
           0.        , 0.        , 0.        ])

    Notes
    -----
    Values are returned for all nodes regardless of whether they are
    selected.  Use the ``selected_nodes`` mask to restrict to the
    current selection.
    """
    # Integers are accepted and normalized before validation.
    comp = component if not isinstance(component, int) else str(component)
    comp = check_comp(comp, PRINCIPAL_TYPE)
    return self._ndof_rst('EPEL', comp)
def plot_nodal_elastic_principal_strain(self, component,
                                        show_node_numbering=False,
                                        **kwargs):
    """Plot elastic nodal principal strain.

    Parameters
    ----------
    component : str
        Nodal principal strain component to plot.  Must be ``'1'``,
        ``'2'``, or ``'3'``.
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the nodal principal strain "1" for the second result set

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_elastic_principal_strain('1')
    """
    scalars = self.nodal_elastic_principal_strain(component)
    # f-string yields the same title text as the original %-format.
    kwargs.setdefault('stitle', f'{component} Nodal\nPrincipal Strain')
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
@property
def nodal_elastic_strain_intensity(self) -> np.ndarray:
    """The elastic nodal strain intensity of the current result.

    Equivalent MAPDL command:

    ``PRNSOL, EPEL, PRIN``

    Examples
    --------
    Elastic strain intensity for result 2

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.nodal_elastic_strain_intensity
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
           0.        ,     0.        ,     0.        ])

    Notes
    -----
    The nodal results are averaged across all selected elements.
    Not all nodes will contain valid results (e.g. midside nodes),
    and those nodes will report a zero value.  Elements that are not
    selected will not contribute to the averaged nodal values, and
    if a node's attached elements are all unselected, the node will
    report a zero value.
    """
    return self._ndof_rst('EPEL', 'INT')
def plot_nodal_elastic_strain_intensity(self,
                                        show_node_numbering=False,
                                        **kwargs):
    """Plot the elastic nodal strain intensity of the current result.

    Parameters
    ----------
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the elastic strain intensity for the second result

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_elastic_strain_intensity()

    Plot off screen and save a screenshot

    >>> mapdl.post_processing.plot_nodal_elastic_strain_intensity(
    ...     off_screen=True, savefig='seqv_00.png')
    """
    # Default plot title; callers may override via ``stitle``.
    kwargs.setdefault('stitle', 'Elastic Nodal\nStrain Intensity')
    return self._plot_point_scalars(self.nodal_elastic_strain_intensity,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
@property
def nodal_elastic_eqv_strain(self) -> np.ndarray:
    """The elastic nodal equivalent strain of the current result.

    Equivalent MAPDL command:

    ``PRNSOL, EPEL, PRIN``

    Examples
    --------
    Elastic equivalent strain for the current result

    >>> mapdl.post_processing.nodal_elastic_eqv_strain
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
           0.        ,     0.        ,     0.        ])

    Strain from result 2

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.nodal_elastic_eqv_strain
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
           0.        ,     0.        ,     0.        ])

    Notes
    -----
    The nodal results are averaged across all selected elements.
    Not all nodes will contain valid results (e.g. midside nodes),
    and those nodes will report a zero value.  Elements that are not
    selected will not contribute to the averaged nodal values, and
    if a node's attached elements are all unselected, the node will
    report a zero value.
    """
    return self._ndof_rst('EPEL', 'EQV')
def plot_nodal_elastic_eqv_strain(self, show_node_numbering=False, **kwargs):
    """Plot the elastic nodal equivalent strain of the current result.

    Parameters
    ----------
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the elastic equivalent strain for the second result

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_elastic_eqv_strain()

    Plot off screen and save a screenshot

    >>> mapdl.post_processing.plot_nodal_elastic_eqv_strain(
    ...     off_screen=True, savefig='seqv_00.png')
    """
    scalars = self.nodal_elastic_eqv_strain
    # Fix: removed the stray space after the embedded newline so the
    # title matches the other equivalent-strain plot titles (compare
    # 'Total Nodal\nEquivalent Strain').
    kwargs.setdefault('stitle', 'Elastic Nodal\nEquivalent Strain')
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
###############################################################################
def nodal_plastic_component_strain(self, component) -> np.ndarray:
    """Plastic nodal component strain.

    Equivalent MAPDL command:

    ``PRNSOL, EPPL, PRIN``

    Parameters
    ----------
    component : str
        Component to retrieve.  Must be ``'X'``, ``'Y'``, ``'Z'``,
        ``'XY'``, ``'YZ'``, or ``'XZ'``.

    Returns
    -------
    numpy.ndarray
        Plastic component strain at each node.

    Examples
    --------
    Plastic component strain in the X direction for the first result

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.nodal_plastic_component_strain('X')
    array([0.60024621, 0.61625265, 0.65081825, ...,
           0.        , 0.        , 0.        ])

    Notes
    -----
    Values are returned for all nodes regardless of whether they are
    selected.  Use the ``selected_nodes`` mask to restrict to the
    current selection.
    """
    # Accept ints for convenience, validate the component name, then
    # query the result file.
    comp = str(component) if isinstance(component, int) else component
    comp = check_comp(comp, COMPONENT_STRESS_TYPE)
    return self._ndof_rst('EPPL', comp)
def plot_nodal_plastic_component_strain(self, component, show_node_numbering=False,
                                        **kwargs):
    """Plot nodal plastic component strain.

    Parameters
    ----------
    component : str
        Nodal plastic component to plot.  Must be ``'X'``,
        ``'Y'``, ``'Z'``, ``'XY'``, ``'YZ'``, or ``'XZ'``.
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the nodal plastic component strain in the X direction for
    the second result set.

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_plastic_component_strain('X')
    """
    disp = self.nodal_plastic_component_strain(component)
    kwargs.setdefault('stitle', '%s Plastic Nodal\nComponent Strain' % component)
    return self._plot_point_scalars(disp, show_node_numbering=show_node_numbering,
                                    **kwargs)
def nodal_plastic_principal_strain(self, component) -> np.ndarray:
    """Nodal plastic principal strain.

    Equivalent MAPDL command:

    ``*VGET, PARM, NODE, , EPPL, 1``

    Parameters
    ----------
    component : str
        Principal component to retrieve.  Must be ``'1'``, ``'2'``,
        or ``'3'``.

    Returns
    -------
    numpy.ndarray
        Plastic principal strain at each node.

    Examples
    --------
    Principal nodal strain in the S1 direction for the first result

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.nodal_plastic_principal_strain('1')
    array([0.60024621, 0.61625265, 0.65081825, ...,
           0.        , 0.        , 0.        ])

    Notes
    -----
    Values are returned for all nodes regardless of whether they are
    selected.  Use the ``selected_nodes`` mask to restrict to the
    current selection.
    """
    # Integers are accepted and normalized before validation.
    comp = component if not isinstance(component, int) else str(component)
    comp = check_comp(comp, PRINCIPAL_TYPE)
    return self._ndof_rst('EPPL', comp)
def plot_nodal_plastic_principal_strain(self, component,
                                        show_node_numbering=False,
                                        **kwargs):
    """Plot plastic nodal principal strain.

    Parameters
    ----------
    component : str
        Nodal principal strain component to plot.  Must be ``'1'``,
        ``'2'``, or ``'3'``.
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the nodal principal strain "1" for the second result set

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_plastic_principal_strain('1')
    """
    scalars = self.nodal_plastic_principal_strain(component)
    # f-string yields the same title text as the original %-format.
    kwargs.setdefault('stitle', f'{component} Nodal\nPrincipal Strain')
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
@property
def nodal_plastic_strain_intensity(self) -> np.ndarray:
    """The plastic nodal strain intensity of the current result.

    Equivalent MAPDL command:

    ``PRNSOL, EPPL, PRIN``

    Examples
    --------
    Plastic strain intensity for result 2

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.nodal_plastic_strain_intensity
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
           0.        ,     0.        ,     0.        ])

    Notes
    -----
    The nodal results are averaged across all selected elements.
    Not all nodes will contain valid results (e.g. midside nodes),
    and those nodes will report a zero value.  Elements that are not
    selected will not contribute to the averaged nodal values, and
    if a node's attached elements are all unselected, the node will
    report a zero value.
    """
    return self._ndof_rst('EPPL', 'INT')
def plot_nodal_plastic_strain_intensity(self,
                                        show_node_numbering=False,
                                        **kwargs):
    """Plot the plastic nodal strain intensity of the current result.

    Parameters
    ----------
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the plastic strain intensity for the second result

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_plastic_strain_intensity()

    Plot off screen and save a screenshot

    >>> mapdl.post_processing.plot_nodal_plastic_strain_intensity(
    ...     off_screen=True, savefig='seqv_00.png')
    """
    # Default plot title; callers may override via ``stitle``.
    kwargs.setdefault('stitle', 'Plastic Nodal\nStrain Intensity')
    return self._plot_point_scalars(self.nodal_plastic_strain_intensity,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
@property
def nodal_plastic_eqv_strain(self) -> np.ndarray:
    """The plastic nodal equivalent strain of the current result.

    Equivalent MAPDL command:

    ``PRNSOL, EPPL, PRIN``

    Examples
    --------
    Plastic equivalent strain for the current result

    >>> mapdl.post_processing.nodal_plastic_eqv_strain
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
           0.        ,     0.        ,     0.        ])

    Strain from result 2

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.nodal_plastic_eqv_strain
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
           0.        ,     0.        ,     0.        ])

    Notes
    -----
    The nodal results are averaged across all selected elements.
    Not all nodes will contain valid results (e.g. midside nodes),
    and those nodes will report a zero value.  Elements that are not
    selected will not contribute to the averaged nodal values, and
    if a node's attached elements are all unselected, the node will
    report a zero value.
    """
    return self._ndof_rst('EPPL', 'EQV')
def plot_nodal_plastic_eqv_strain(self, show_node_numbering=False, **kwargs):
    """Plot the plastic nodal equivalent strain of the current result.

    Parameters
    ----------
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the plastic equivalent strain for the second result

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_plastic_eqv_strain()

    Plot off screen and save a screenshot

    >>> mapdl.post_processing.plot_nodal_plastic_eqv_strain(
    ...     off_screen=True, savefig='seqv_00.png')
    """
    scalars = self.nodal_plastic_eqv_strain
    # Fix: removed the stray space after the embedded newline so the
    # title matches the other equivalent-strain plot titles (compare
    # 'Total Nodal\nEquivalent Strain').
    kwargs.setdefault('stitle', 'Plastic Nodal\nEquivalent Strain')
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
###############################################################################
def nodal_thermal_component_strain(self, component) -> np.ndarray:
    """Thermal nodal component strain.

    Equivalent MAPDL command:

    ``PRNSOL, EPTH, PRIN``

    Parameters
    ----------
    component : str
        Component to retrieve.  Must be ``'X'``, ``'Y'``, ``'Z'``,
        ``'XY'``, ``'YZ'``, or ``'XZ'``.

    Returns
    -------
    numpy.ndarray
        Thermal component strain at each node.

    Examples
    --------
    Thermal component strain in the X direction for the first result

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.nodal_thermal_component_strain('X')
    array([0.60024621, 0.61625265, 0.65081825, ...,
           0.        , 0.        , 0.        ])

    Notes
    -----
    Values are returned for all nodes regardless of whether they are
    selected.  Use the ``selected_nodes`` mask to restrict to the
    current selection.
    """
    # Accept ints for convenience, validate the component name, then
    # query the result file.
    comp = str(component) if isinstance(component, int) else component
    comp = check_comp(comp, COMPONENT_STRESS_TYPE)
    return self._ndof_rst('EPTH', comp)
def plot_nodal_thermal_component_strain(self, component, show_node_numbering=False,
                                        **kwargs):
    """Plot nodal thermal component strain.

    Parameters
    ----------
    component : str
        Nodal thermal component to plot.  Must be ``'X'``,
        ``'Y'``, ``'Z'``, ``'XY'``, ``'YZ'``, or ``'XZ'``.
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the nodal thermal component strain in the X direction for
    the second result set.

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_thermal_component_strain('X')
    """
    disp = self.nodal_thermal_component_strain(component)
    kwargs.setdefault('stitle', '%s Thermal Nodal\nComponent Strain' % component)
    return self._plot_point_scalars(disp, show_node_numbering=show_node_numbering,
                                    **kwargs)
def nodal_thermal_principal_strain(self, component) -> np.ndarray:
    """Nodal thermal principal strain.

    Equivalent MAPDL command:

    ``*VGET, PARM, NODE, , EPTH, 1``

    Parameters
    ----------
    component : str
        Principal component to retrieve.  Must be ``'1'``, ``'2'``,
        or ``'3'``.

    Returns
    -------
    numpy.ndarray
        Thermal principal strain at each node.

    Examples
    --------
    Principal nodal strain in the S1 direction for the first result

    >>> mapdl.post1()
    >>> mapdl.set(1, 1)
    >>> mapdl.post_processing.nodal_thermal_principal_strain('1')
    array([0.60024621, 0.61625265, 0.65081825, ...,
           0.        , 0.        , 0.        ])

    Notes
    -----
    Values are returned for all nodes regardless of whether they are
    selected.  Use the ``selected_nodes`` mask to restrict to the
    current selection.
    """
    # Integers are accepted and normalized before validation.
    comp = component if not isinstance(component, int) else str(component)
    comp = check_comp(comp, PRINCIPAL_TYPE)
    return self._ndof_rst('EPTH', comp)
def plot_nodal_thermal_principal_strain(self, component,
                                        show_node_numbering=False,
                                        **kwargs):
    """Plot thermal nodal principal strain.

    Parameters
    ----------
    component : str
        Nodal principal strain component to plot.  Must be ``'1'``,
        ``'2'``, or ``'3'``.
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the nodal principal strain "1" for the second result set

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_thermal_principal_strain('1')
    """
    scalars = self.nodal_thermal_principal_strain(component)
    # f-string yields the same title text as the original %-format.
    kwargs.setdefault('stitle', f'{component} Nodal\nPrincipal Strain')
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
@property
def nodal_thermal_strain_intensity(self) -> np.ndarray:
    """The thermal nodal strain intensity of the current result.

    Equivalent MAPDL command:

    ``PRNSOL, EPTH, PRIN``

    Examples
    --------
    Thermal strain intensity for result 2

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.nodal_thermal_strain_intensity
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
           0.        ,     0.        ,     0.        ])

    Notes
    -----
    The nodal results are averaged across all selected elements.
    Not all nodes will contain valid results (e.g. midside nodes),
    and those nodes will report a zero value.  Elements that are not
    selected will not contribute to the averaged nodal values, and
    if a node's attached elements are all unselected, the node will
    report a zero value.
    """
    return self._ndof_rst('EPTH', 'INT')
def plot_nodal_thermal_strain_intensity(self,
                                        show_node_numbering=False,
                                        **kwargs):
    """Plot the thermal nodal strain intensity of the current result.

    Parameters
    ----------
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the thermal strain intensity for the second result

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_thermal_strain_intensity()

    Plot off screen and save a screenshot

    >>> mapdl.post_processing.plot_nodal_thermal_strain_intensity(
    ...     off_screen=True, savefig='seqv_00.png')
    """
    # Default plot title; callers may override via ``stitle``.
    kwargs.setdefault('stitle', 'Thermal Nodal\nStrain Intensity')
    return self._plot_point_scalars(self.nodal_thermal_strain_intensity,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
@property
def nodal_thermal_eqv_strain(self) -> np.ndarray:
    """The thermal nodal equivalent strain of the current result.

    Equivalent MAPDL command:

    ``PRNSOL, EPTH, PRIN``

    Examples
    --------
    Thermal equivalent strain for the current result

    >>> mapdl.post_processing.nodal_thermal_eqv_strain
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
           0.        ,     0.        ,     0.        ])

    Strain from result 2

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.nodal_thermal_eqv_strain
    array([15488.84357602, 16434.95432337, 15683.2334295 , ...,
           0.        ,     0.        ,     0.        ])

    Notes
    -----
    The nodal results are averaged across all selected elements.
    Not all nodes will contain valid results (e.g. midside nodes),
    and those nodes will report a zero value.  Elements that are not
    selected will not contribute to the averaged nodal values, and
    if a node's attached elements are all unselected, the node will
    report a zero value.
    """
    return self._ndof_rst('EPTH', 'EQV')
def plot_nodal_thermal_eqv_strain(self, show_node_numbering=False, **kwargs):
    """Plot the thermal nodal equivalent strain of the current result.

    Parameters
    ----------
    show_node_numbering : bool, optional
        Show node numbers on the plot.

    Returns
    -------
    cpos : list
        Camera position from plotter.  Can be reused as an input
        parameter to use the same camera position for future plots.

    Examples
    --------
    Plot the thermal equivalent strain for the second result

    >>> mapdl.post1()
    >>> mapdl.set(1, 2)
    >>> mapdl.post_processing.plot_nodal_thermal_eqv_strain()

    Plot off screen and save a screenshot

    >>> mapdl.post_processing.plot_nodal_thermal_eqv_strain(
    ...     off_screen=True, savefig='seqv_00.png')
    """
    scalars = self.nodal_thermal_eqv_strain
    # Fix: removed the stray space after the embedded newline so the
    # title matches the other equivalent-strain plot titles (compare
    # 'Total Nodal\nEquivalent Strain').
    kwargs.setdefault('stitle', 'Thermal Nodal\nEquivalent Strain')
    return self._plot_point_scalars(scalars,
                                    show_node_numbering=show_node_numbering,
                                    **kwargs)
| 34.728737
| 90
| 0.551068
| 7,473
| 69,006
| 4.945671
| 0.054061
| 0.035228
| 0.046782
| 0.024351
| 0.849103
| 0.814984
| 0.780892
| 0.749587
| 0.727644
| 0.714224
| 0
| 0.040179
| 0.337811
| 69,006
| 1,986
| 91
| 34.746224
| 0.76864
| 0.553691
| 0
| 0.421717
| 0
| 0
| 0.094101
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.179293
| false
| 0
| 0.017677
| 0
| 0.381313
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e02e5a61128a8bdbe3a34dfb1aec0f261a6be6f1
| 131
|
py
|
Python
|
app/modules/_base_module.py
|
ihor-pyvovarnyk/oae-sound-processing-tool
|
602420cd9705997002b6cb9eb86bd09be899bd5d
|
[
"BSD-2-Clause"
] | null | null | null |
app/modules/_base_module.py
|
ihor-pyvovarnyk/oae-sound-processing-tool
|
602420cd9705997002b6cb9eb86bd09be899bd5d
|
[
"BSD-2-Clause"
] | null | null | null |
app/modules/_base_module.py
|
ihor-pyvovarnyk/oae-sound-processing-tool
|
602420cd9705997002b6cb9eb86bd09be899bd5d
|
[
"BSD-2-Clause"
] | null | null | null |
class BaseModule(object):
    """Base class for application modules.

    Holds the shared ``connector`` object and provides a ``setup``
    hook that concrete modules may override.
    """

    def __init__(self, connector):
        # Connector shared by all modules built on this base.
        self.connector = connector

    def setup(self):
        """Initialization hook; the default implementation does nothing."""
        pass
| 18.714286
| 34
| 0.633588
| 14
| 131
| 5.642857
| 0.642857
| 0.329114
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.274809
| 131
| 6
| 35
| 21.833333
| 0.831579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.2
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
e050ba49895fe15d940e7bc831349709b4ea5adf
| 7,967
|
py
|
Python
|
hardware/tests/test_gps_pi.py
|
ab7289/mercury-hardware
|
dc2a4e888184a32aaa1355a1fe9ec77a9cb15ebe
|
[
"MIT"
] | 1
|
2020-05-09T21:37:12.000Z
|
2020-05-09T21:37:12.000Z
|
hardware/tests/test_gps_pi.py
|
ab7289/mercury-hardware
|
dc2a4e888184a32aaa1355a1fe9ec77a9cb15ebe
|
[
"MIT"
] | 8
|
2020-05-07T01:54:14.000Z
|
2020-05-13T21:31:56.000Z
|
hardware/tests/test_gps_pi.py
|
ab7289/mercury-hardware
|
dc2a4e888184a32aaa1355a1fe9ec77a9cb15ebe
|
[
"MIT"
] | 2
|
2020-05-06T22:24:20.000Z
|
2020-05-13T20:32:29.000Z
|
import unittest
from unittest.mock import patch
from testfixtures import TempDirectory
import os
from hardware.Utils.logger import Logger
from hardware.gpsPi.gps_reader import GPSReader
@patch("serial.Serial")
class GPSPiTests(unittest.TestCase):
def setUp(self):
    # Fresh temporary directory for each test's log files.
    self.temp_dir = TempDirectory()
def tearDown(self):
    # Remove the per-test temporary directory and its contents.
    self.temp_dir.cleanup()
def test_init_no_logs(self, mock_port):
    """GPSReader with no argument uses the GPS_LOG_FILE logger and
    opens the serial port with the env-configured port/baudrate."""
    # Replace real object os.environ with mock dictionary
    with patch.dict(
        os.environ,
        {
            "GPS_LOG_FILE": "logger.txt",
            "LOG_DIRECTORY": self.temp_dir.path,
            "GPS_PORT": "/dev/serial0",
            "GPS_BAUDRATE": "9600",
        },
    ):
        gps_reader = GPSReader()
        # The serial port must be opened with the configured settings.
        mock_port.assert_called_with(
            os.environ["GPS_PORT"], os.environ["GPS_BAUDRATE"],
        )
        self.assertTrue(gps_reader.logging is not None)
        self.assertTrue(gps_reader.logging.name == "GPS_LOG_FILE")
        self.assertIsInstance(gps_reader.logging, Logger)
def test_init_logs(self, mock_port):
    """Passing a log-file env-var name to GPSReader selects that
    logger instead of the default GPS_LOG_FILE."""
    with patch.dict(
        os.environ,
        {
            "GPS_HAT_LOG_FILE": "logger.txt",
            "LOG_DIRECTORY": self.temp_dir.path,
            "GPS_PORT": "/dev/serial0",
            "GPS_BAUDRATE": "9600",
        },
    ):
        gps_reader = GPSReader("GPS_HAT_LOG_FILE")
        # The serial port must still use the env-configured settings.
        mock_port.assert_called_with(
            os.environ["GPS_PORT"], os.environ["GPS_BAUDRATE"],
        )
        self.assertTrue(gps_reader.logging is not None)
        self.assertTrue(gps_reader.logging.name == "GPS_HAT_LOG_FILE")
        self.assertIsInstance(gps_reader.logging, Logger)
@patch("hardware.gpsPi.gps_reader.date_str_with_current_timezone")
def test_get_location_valid_data(self, mock_date, mock_port):
    """A valid $GPRMC sentence (status 'A', N/W hemispheres) is
    parsed into positive latitude and negative longitude."""
    mock_port.return_value.inWaiting.return_value = 1
    mock_port.return_value.readline.return_value = (
        "b'$GPRMC,194509.000,A,4042.6142,N,07400.4168,W,2.03,221.11,160412,,,A*77"
    )
    mock_date.return_value = "example date"
    with patch.dict(
        os.environ,
        {
            "GPS_LOG_FILE": "logger.txt",
            "LOG_DIRECTORY": self.temp_dir.path,
            "GPS_PORT": "/dev/serial0",
            "GPS_BAUDRATE": "9600",
        },
    ):
        # Expected decimal degrees converted from the NMEA sentence.
        expected_data = {}
        expected_data["sensor_id"] = 1
        expected_data["values"] = {
            "latitude": 40.71023666666667,
            "longitude": -74.00694666666666,
        }
        expected_data["date"] = "example date"
        gps_reader = GPSReader()
        data = gps_reader.get_geolocation()
        mock_port.return_value.inWaiting.assert_called()
        mock_port.return_value.readline.assert_called()
        self.assertEqual(expected_data, data)
@patch("hardware.gpsPi.gps_reader.date_str_with_current_timezone")
def test_get_location_other_valid_data(self, mock_date, mock_port):
    """A valid $GPRMC sentence in the southern hemisphere ('S')
    yields a negative latitude."""
    mock_port.return_value.inWaiting.return_value = 1
    mock_port.return_value.readline.return_value = (
        "b'$GPRMC,194509.000,A,4042.6142,S,07400.4168,W,2.03,221.11,160412,,,A*77"
    )
    mock_date.return_value = "example date"
    with patch.dict(
        os.environ,
        {
            "GPS_LOG_FILE": "logger.txt",
            "LOG_DIRECTORY": self.temp_dir.path,
            "GPS_PORT": "/dev/serial0",
            "GPS_BAUDRATE": "9600",
        },
    ):
        # Same coordinates as the N/W case but latitude negated.
        expected_data = {}
        expected_data["sensor_id"] = 1
        expected_data["values"] = {
            "latitude": -40.71023666666667,
            "longitude": -74.00694666666666,
        }
        expected_data["date"] = "example date"
        gps_reader = GPSReader()
        data = gps_reader.get_geolocation()
        mock_port.return_value.inWaiting.assert_called()
        mock_port.return_value.readline.assert_called()
        self.assertEqual(expected_data, data)
def test_get_location_invalid_nmeatype(self, mock_port):
    """A non-GPRMC sentence (here GPGGA) must produce no geolocation."""
    serial_mock = mock_port.return_value
    serial_mock.inWaiting.return_value = 1
    serial_mock.readline.return_value = (
        "b'$GPGGA,194509.000,A,4042.6142,N,07400.4168,W,2.03,221.11,160412,,,A*77"
    )
    env = {
        "GPS_LOG_FILE": "logger.txt",
        "LOG_DIRECTORY": self.temp_dir.path,
        "GPS_PORT": "/dev/serial0",
        "GPS_BAUDRATE": "9600",
    }
    with patch.dict(os.environ, env):
        reading = GPSReader().get_geolocation()
    serial_mock.inWaiting.assert_called()
    serial_mock.readline.assert_called()
    self.assertIsNone(reading)
def test_get_location_invalid_data(self, mock_port):
    """A GPRMC sentence flagged void ('V' status field) is rejected."""
    serial_mock = mock_port.return_value
    serial_mock.inWaiting.return_value = 1
    serial_mock.readline.return_value = (
        "b'$GPRMC,194509.000,V,4042.6142,N,07400.4168,W,2.03,221.11,160412,,,A*77"
    )
    env = {
        "GPS_LOG_FILE": "logger.txt",
        "LOG_DIRECTORY": self.temp_dir.path,
        "GPS_PORT": "/dev/serial0",
        "GPS_BAUDRATE": "9600",
    }
    with patch.dict(os.environ, env):
        reading = GPSReader().get_geolocation()
    serial_mock.inWaiting.assert_called()
    serial_mock.readline.assert_called()
    self.assertIsNone(reading)
@patch("hardware.gpsPi.gps_reader.date_str_with_current_timezone")
def test_get_speed_in_mph(self, mock_date, mock_port):
    """Speed over ground (knots, field 7) is reported converted to mph."""
    serial_mock = mock_port.return_value
    serial_mock.inWaiting.return_value = 1
    serial_mock.readline.return_value = (
        "b'$GPRMC,194509.000,A,4042.6142,N,07400.4168,W,2.03,221.11,160412,,,A*77"
    )
    mock_date.return_value = "example date"
    env = {
        "GPS_LOG_FILE": "logger.txt",
        "LOG_DIRECTORY": self.temp_dir.path,
        "GPS_PORT": "/dev/serial0",
        "GPS_BAUDRATE": "9600",
    }
    # 1.151 is the knots -> mph conversion factor used by the reader.
    expected = {
        "sensor_id": 1,
        "values": {"speed": 2.03 * 1.151},
        "date": "example date",
    }
    with patch.dict(os.environ, env):
        reading = GPSReader().get_speed_mph()
    serial_mock.inWaiting.assert_called()
    serial_mock.readline.assert_called()
    self.assertEqual(expected, reading)
def test_get_speed_in_mph_invalid_data(self, mock_port):
    """A malformed sentence type ('$GP') must yield no speed reading."""
    serial_mock = mock_port.return_value
    serial_mock.inWaiting.return_value = 1
    serial_mock.readline.return_value = (
        "b'$GP,194509.000,A,4042.6142,N,07400.4168,W,2.03,221.11,160412,,,A*77"
    )
    env = {
        "GPS_LOG_FILE": "logger.txt",
        "LOG_DIRECTORY": self.temp_dir.path,
        "GPS_PORT": "/dev/serial0",
        "GPS_BAUDRATE": "9600",
    }
    with patch.dict(os.environ, env):
        reading = GPSReader().get_speed_mph()
    serial_mock.inWaiting.assert_called()
    serial_mock.readline.assert_called()
    self.assertIsNone(reading)
# Allow running this test module directly (python <module>.py).
if __name__ == "__main__":
    unittest.main()
| 31.741036
| 86
| 0.567842
| 893
| 7,967
| 4.763718
| 0.12654
| 0.100846
| 0.078984
| 0.107193
| 0.889046
| 0.889046
| 0.878467
| 0.878467
| 0.85543
| 0.85543
| 0
| 0.068711
| 0.320447
| 7,967
| 250
| 87
| 31.868
| 0.71703
| 0.006401
| 0
| 0.682292
| 0
| 0.03125
| 0.182209
| 0.075436
| 0
| 0
| 0
| 0
| 0.135417
| 1
| 0.052083
| false
| 0
| 0.03125
| 0
| 0.088542
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0eb278e9065ebf8c092ef9d7df0165d45be0322b
| 12,204
|
py
|
Python
|
hallo/test/modules/convert/test_convert_unit_set_prefix_group.py
|
SpangleLabs/Hallo
|
17145d8f76552ecd4cbc5caef8924bd2cf0cbf24
|
[
"MIT"
] | 1
|
2022-01-27T13:25:01.000Z
|
2022-01-27T13:25:01.000Z
|
hallo/test/modules/convert/test_convert_unit_set_prefix_group.py
|
joshcoales/Hallo
|
17145d8f76552ecd4cbc5caef8924bd2cf0cbf24
|
[
"MIT"
] | 75
|
2015-09-26T18:07:18.000Z
|
2022-01-04T07:15:11.000Z
|
hallo/test/modules/convert/test_convert_unit_set_prefix_group.py
|
SpangleLabs/Hallo
|
17145d8f76552ecd4cbc5caef8924bd2cf0cbf24
|
[
"MIT"
] | 1
|
2021-04-10T12:02:47.000Z
|
2021-04-10T12:02:47.000Z
|
import unittest
from hallo.events import EventMessage
from hallo.test.modules.convert.convert_function_test_base import ConvertFunctionTestBase
class ConvertUnitSetPrefixGroupTest(ConvertFunctionTestBase, unittest.TestCase):
    """Tests for the "convert unit prefix group" function.

    Covers every way the unit and prefix group can be specified
    (positionally, via name=value arguments, with or without type=),
    plus the error replies for missing, ambiguous or unknown input.

    The original file repeated the dispatch/collect/assert boilerplate in
    every test; it is factored into the two private helpers below. Test
    method names and behavior are unchanged.
    """

    def _send_command(self, text):
        """Dispatch *text* as a message from the test user and return the
        lower-cased text of the single reply sent back by the server."""
        self.function_dispatcher.dispatch(
            EventMessage(self.server, None, self.test_user, text)
        )
        data = self.server.get_send_data(1, self.test_user, EventMessage)
        return data[0].text.lower()

    def _assert_group_reported(self, reply, group_name, unit_name):
        """Assert *reply* confirms *group_name* was set on *unit_name*."""
        assert 'set "{}" as the prefix group'.format(group_name) in reply
        assert 'for the "{}" unit'.format(unit_name) in reply

    def test_type_specified_1(self):
        reply = self._send_command(
            "convert unit prefix group type=test_type1 unit=same_name prefix_group=test_group1"
        )
        self._assert_group_reported(reply, "test_group1", "unit1b")
        assert self.test_unit1b.valid_prefix_group == self.test_group1

    def test_type_specified_2(self):
        reply = self._send_command(
            "convert unit prefix group type=test_type1 same_name prefix_group=test_group1"
        )
        self._assert_group_reported(reply, "test_group1", "unit1b")
        assert self.test_unit1b.valid_prefix_group == self.test_group1

    def test_type_specified_3(self):
        reply = self._send_command(
            "convert unit prefix group type=test_type1 unit=same_name test_group1"
        )
        self._assert_group_reported(reply, "test_group1", "unit1b")
        assert self.test_unit1b.valid_prefix_group == self.test_group1

    def test_type_specified_4(self):
        reply = self._send_command(
            "convert unit prefix group type=test_type1 same_name test_group1"
        )
        self._assert_group_reported(reply, "test_group1", "unit1b")
        assert self.test_unit1b.valid_prefix_group == self.test_group1

    def test_type_specified_set_group_none_1(self):
        self.test_unit1b.valid_prefix_group = self.test_group1
        reply = self._send_command(
            "convert unit prefix group type=test_type1 unit=same_name prefix_group=none"
        )
        self._assert_group_reported(reply, "none", "unit1b")
        assert self.test_unit1b.valid_prefix_group is None

    def test_type_specified_set_group_none_2(self):
        self.test_unit1b.valid_prefix_group = self.test_group1
        reply = self._send_command(
            "convert unit prefix group type=test_type1 same_name prefix_group=none"
        )
        self._assert_group_reported(reply, "none", "unit1b")
        assert self.test_unit1b.valid_prefix_group is None

    def test_type_specified_set_group_none_3(self):
        self.test_unit1b.valid_prefix_group = self.test_group1
        reply = self._send_command(
            "convert unit prefix group type=test_type1 unit=same_name none"
        )
        self._assert_group_reported(reply, "none", "unit1b")
        assert self.test_unit1b.valid_prefix_group is None

    def test_type_specified_set_group_none_4(self):
        self.test_unit1b.valid_prefix_group = self.test_group1
        reply = self._send_command(
            "convert unit prefix group type=test_type1 same_name none"
        )
        self._assert_group_reported(reply, "none", "unit1b")
        assert self.test_unit1b.valid_prefix_group is None

    def test_blank_message(self):
        reply = self._send_command("convert unit prefix group")
        assert "you must specify both a unit name and a prefix group to set" in reply

    def test_one_word_1(self):
        # Only a unit name given: not enough information.
        reply = self._send_command("convert unit prefix group unit1a")
        assert "you must specify both a unit name and a prefix group to set" in reply

    def test_one_word_2(self):
        # Only a group name given: not enough information.
        reply = self._send_command("convert unit prefix group test_group1")
        assert "you must specify both a unit name and a prefix group to set" in reply

    def test_no_args_specified_1(self):
        reply = self._send_command("convert unit prefix group unit1a test_group1")
        self._assert_group_reported(reply, "test_group1", "unit1a")
        assert self.test_unit1a.valid_prefix_group is self.test_group1

    def test_no_args_specified_2(self):
        # Positional arguments are accepted in either order.
        reply = self._send_command("convert unit prefix group test_group1 unit1a")
        self._assert_group_reported(reply, "test_group1", "unit1a")
        assert self.test_unit1a.valid_prefix_group is self.test_group1

    def test_unit_specified_1(self):
        reply = self._send_command("convert unit prefix group unit=unit2a test_group1")
        self._assert_group_reported(reply, "test_group1", "unit2a")
        assert self.test_unit2a.valid_prefix_group is self.test_group1

    def test_unit_specified_2(self):
        reply = self._send_command(
            "convert unit prefix group unit=unit2a group=test_group1"
        )
        self._assert_group_reported(reply, "test_group1", "unit2a")
        assert self.test_unit2a.valid_prefix_group is self.test_group1

    def test_extra_word_split(self):
        # Three positional words: the unit/group boundary is ambiguous.
        reply = self._send_command(
            "convert unit prefix group unit1a test_group1 blah"
        )
        assert "could not parse where unit name ends and prefix group begins" in reply
        assert "please specify with unit=<name> prefix_group=<name>" in reply
        assert self.test_unit1a.valid_prefix_group is None

    def test_ambiguous_unit(self):
        # "same_name" exists in two types; without type= it is ambiguous.
        reply = self._send_command(
            "convert unit prefix group unit=same_name test_group1"
        )
        assert "unit name is too ambiguous" in reply
        assert "please specify with unit= and type=" in reply
        assert self.test_unit1b.valid_prefix_group is None
        assert self.test_unit2b.valid_prefix_group is None

    def test_prefix_group_none_1(self):
        self.test_unit2b.valid_prefix_group = self.test_group1
        reply = self._send_command("convert unit prefix group unit=unit2b none")
        self._assert_group_reported(reply, "none", "unit2b")
        assert self.test_unit2b.valid_prefix_group is None

    def test_prefix_group_none_2(self):
        self.test_unit2b.valid_prefix_group = self.test_group1
        reply = self._send_command(
            "convert unit prefix group unit=unit2b prefixes=none"
        )
        self._assert_group_reported(reply, "none", "unit2b")
        assert self.test_unit2b.valid_prefix_group is None

    def test_unknown_group(self):
        reply = self._send_command(
            "convert unit prefix group unit=unit2b 'prefix group'='no group'"
        )
        assert "prefix group not recognised" in reply
        assert self.test_unit2b.valid_prefix_group is None

    def test_unknown_unit(self):
        reply = self._send_command(
            "convert unit prefix group unit=no_unit group=test_group1"
        )
        assert "no unit found by that name" in reply
| 39.882353
| 100
| 0.601934
| 1,494
| 12,204
| 4.708166
| 0.056894
| 0.090987
| 0.071652
| 0.057862
| 0.94029
| 0.938584
| 0.934603
| 0.931334
| 0.931334
| 0.931334
| 0
| 0.019507
| 0.315306
| 12,204
| 305
| 101
| 40.013115
| 0.822283
| 0
| 0
| 0.648936
| 0
| 0
| 0.19002
| 0.003933
| 0
| 0
| 0
| 0
| 0.195035
| 1
| 0.074468
| false
| 0
| 0.010638
| 0
| 0.088652
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0eb27e739f718485cc7a151aa1750da42ce658d5
| 10,976
|
py
|
Python
|
unittest/bindings/test_costs.py
|
iit-DLSLab/crocoddyl
|
2b8b731fae036916ff9b4ce3969e2c96c009593c
|
[
"BSD-3-Clause"
] | null | null | null |
unittest/bindings/test_costs.py
|
iit-DLSLab/crocoddyl
|
2b8b731fae036916ff9b4ce3969e2c96c009593c
|
[
"BSD-3-Clause"
] | null | null | null |
unittest/bindings/test_costs.py
|
iit-DLSLab/crocoddyl
|
2b8b731fae036916ff9b4ce3969e2c96c009593c
|
[
"BSD-3-Clause"
] | null | null | null |
import sys
import unittest
import numpy as np
import crocoddyl
import pinocchio
from crocoddyl.utils import (CoMPositionCostDerived, ControlCostDerived, FramePlacementCostDerived,
FrameTranslationCostDerived, FrameVelocityCostDerived, StateCostDerived)
class CostModelAbstractTestCase(unittest.TestCase):
    """Checks a crocoddyl cost model against a Python "derived" reference.

    Subclasses set the four class attributes below; the shared test
    methods then verify that COST and COST_DER agree on dimensions,
    cost values, residuals and derivatives.
    """

    ROBOT_MODEL = None  # pinocchio robot model (set by subclasses)
    ROBOT_STATE = None  # crocoddyl state model built on ROBOT_MODEL
    COST = None  # cost model under test
    COST_DER = None  # reference implementation from crocoddyl.utils

    def setUp(self):
        """Draw a random state/control and pre-compute robot kinematics."""
        self.robot_data = self.ROBOT_MODEL.createData()
        self.x = self.ROBOT_STATE.rand()
        self.u = pinocchio.utils.rand(self.ROBOT_MODEL.nv)
        self.data = self.COST.createData(self.robot_data)
        self.data_der = self.COST_DER.createData(self.robot_data)
        nq, nv = self.ROBOT_MODEL.nq, self.ROBOT_MODEL.nv
        # Populate robot_data (frame placements, joint Jacobians, CoM
        # Jacobian) before evaluating the costs; x[:nq] is the
        # configuration and x[nq:] the velocity part of the state.
        pinocchio.forwardKinematics(self.ROBOT_MODEL, self.robot_data, self.x[:nq], self.x[nq:])
        pinocchio.computeForwardKinematicsDerivatives(self.ROBOT_MODEL, self.robot_data, self.x[:nq], self.x[nq:],
                                                      pinocchio.utils.zero(nv))
        pinocchio.computeJointJacobians(self.ROBOT_MODEL, self.robot_data, self.x[:nq])
        pinocchio.updateFramePlacements(self.ROBOT_MODEL, self.robot_data)
        pinocchio.jacobianCenterOfMass(self.ROBOT_MODEL, self.robot_data, self.x[:nq], False)

    def test_dimensions(self):
        """Both implementations must report identical problem dimensions."""
        self.assertEqual(self.COST.state.nx, self.COST_DER.state.nx, "Wrong nx.")
        self.assertEqual(self.COST.state.ndx, self.COST_DER.state.ndx, "Wrong ndx.")
        self.assertEqual(self.COST.nu, self.COST_DER.nu, "Wrong nu.")
        self.assertEqual(self.COST.state.nq, self.COST_DER.state.nq, "Wrong nq.")
        self.assertEqual(self.COST.state.nv, self.COST_DER.state.nv, "Wrong nv.")
        self.assertEqual(self.COST.activation.nr, self.COST_DER.activation.nr, "Wrong nr.")

    def test_calc(self):
        """Cost value and residual must match between both implementations."""
        # Run calc for both action models
        self.COST.calc(self.data, self.x, self.u)
        self.COST_DER.calc(self.data_der, self.x, self.u)
        # Checking the cost value and its residual
        self.assertAlmostEqual(self.data.cost, self.data_der.cost, 10, "Wrong cost value.")
        self.assertTrue(np.allclose(self.data.r, self.data_der.r, atol=1e-9), "Wrong cost residuals.")

    def test_calcDiff(self):
        """Derivatives (Lx, Lu, Lxx, Lxu, Luu) must match as well."""
        # Run calc for both action models
        self.COST.calcDiff(self.data, self.x, self.u)
        self.COST_DER.calcDiff(self.data_der, self.x, self.u)
        # Checking the cost value and its residual
        self.assertAlmostEqual(self.data.cost, self.data_der.cost, 10, "Wrong cost value.")
        self.assertTrue(np.allclose(self.data.r, self.data_der.r, atol=1e-9), "Wrong cost residuals.")
        # Checking the Jacobians and Hessians of the cost
        self.assertTrue(np.allclose(self.data.Lx, self.data_der.Lx, atol=1e-9), "Wrong Lx.")
        self.assertTrue(np.allclose(self.data.Lu, self.data_der.Lu, atol=1e-9), "Wrong Lu.")
        self.assertTrue(np.allclose(self.data.Lxx, self.data_der.Lxx, atol=1e-9), "Wrong Lxx.")
        self.assertTrue(np.allclose(self.data.Lxu, self.data_der.Lxu, atol=1e-9), "Wrong Lxu.")
        self.assertTrue(np.allclose(self.data.Luu, self.data_der.Luu, atol=1e-9), "Wrong Luu.")
class CostModelSumTestCase(unittest.TestCase):
    """Checks that a CostModelSum holding one cost matches that cost.

    Subclasses set COST; setUp wraps it in a CostModelSum (single item
    'myCost', weight 1.) and the tests verify the sum evaluates exactly
    like the bare cost.
    """

    ROBOT_MODEL = None  # pinocchio robot model (set by subclasses)
    ROBOT_STATE = None  # crocoddyl state model built on ROBOT_MODEL
    COST = None  # cost model to wrap in the sum

    def setUp(self):
        """Build the one-item cost sum and pre-compute robot kinematics."""
        self.robot_data = self.ROBOT_MODEL.createData()
        self.x = self.ROBOT_STATE.rand()
        self.u = pinocchio.utils.rand(self.ROBOT_MODEL.nv)
        self.cost_sum = crocoddyl.CostModelSum(self.ROBOT_STATE)
        self.cost_sum.addCost('myCost', self.COST, 1.)
        self.data = self.COST.createData(self.robot_data)
        self.data_sum = self.cost_sum.createData(self.robot_data)
        nq, nv = self.ROBOT_MODEL.nq, self.ROBOT_MODEL.nv
        # Populate robot_data (frame placements, joint Jacobians, CoM
        # Jacobian) before evaluating the costs.
        pinocchio.forwardKinematics(self.ROBOT_MODEL, self.robot_data, self.x[:nq], self.x[nq:])
        pinocchio.computeForwardKinematicsDerivatives(self.ROBOT_MODEL, self.robot_data, self.x[:nq], self.x[nq:],
                                                      pinocchio.utils.zero(nv))
        pinocchio.computeJointJacobians(self.ROBOT_MODEL, self.robot_data, self.x[:nq])
        pinocchio.updateFramePlacements(self.ROBOT_MODEL, self.robot_data)
        pinocchio.jacobianCenterOfMass(self.ROBOT_MODEL, self.robot_data, self.x[:nq], False)

    def test_dimensions(self):
        """The sum must report the same problem dimensions as its only cost."""
        self.assertEqual(self.COST.state.nx, self.cost_sum.state.nx, "Wrong nx.")
        self.assertEqual(self.COST.state.ndx, self.cost_sum.state.ndx, "Wrong ndx.")
        self.assertEqual(self.COST.nu, self.cost_sum.nu, "Wrong nu.")
        self.assertEqual(self.COST.state.nq, self.cost_sum.state.nq, "Wrong nq.")
        self.assertEqual(self.COST.state.nv, self.cost_sum.state.nv, "Wrong nv.")
        self.assertEqual(self.COST.activation.nr, self.cost_sum.nr, "Wrong nr.")

    def test_calc(self):
        """Cost value and residual must match between cost and sum."""
        # Run calc for both action models
        self.COST.calc(self.data, self.x, self.u)
        self.cost_sum.calc(self.data_sum, self.x, self.u)
        # Checking the cost value and its residual
        self.assertAlmostEqual(self.data.cost, self.data_sum.cost, 10, "Wrong cost value.")
        self.assertTrue(np.allclose(self.data.r, self.data_sum.r, atol=1e-9), "Wrong cost residuals.")

    def test_calcDiff(self):
        """Derivatives (Lx, Lu, Lxx, Lxu, Luu) must match as well."""
        # Run calc for both action models
        self.COST.calcDiff(self.data, self.x, self.u)
        self.cost_sum.calcDiff(self.data_sum, self.x, self.u)
        # Checking the cost value and its residual
        self.assertAlmostEqual(self.data.cost, self.data_sum.cost, 10, "Wrong cost value.")
        self.assertTrue(np.allclose(self.data.r, self.data_sum.r, atol=1e-9), "Wrong cost residuals.")
        # Checking the Jacobians and Hessians of the cost
        self.assertTrue(np.allclose(self.data.Lx, self.data_sum.Lx, atol=1e-9), "Wrong Lx.")
        self.assertTrue(np.allclose(self.data.Lu, self.data_sum.Lu, atol=1e-9), "Wrong Lu.")
        self.assertTrue(np.allclose(self.data.Lxx, self.data_sum.Lxx, atol=1e-9), "Wrong Lxx.")
        self.assertTrue(np.allclose(self.data.Lxu, self.data_sum.Lxu, atol=1e-9), "Wrong Lxu.")
        self.assertTrue(np.allclose(self.data.Luu, self.data_sum.Luu, atol=1e-9), "Wrong Luu.")

    def test_removeCost(self):
        """Removing the only registered cost leaves the sum empty."""
        self.cost_sum.removeCost("myCost")
        self.assertEqual(len(self.cost_sum.costs), 0, "The number of cost items should be zero")
class StateCostTest(CostModelAbstractTestCase):
    """State cost vs. its StateCostDerived Python reference."""
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    COST = crocoddyl.CostModelState(ROBOT_STATE)
    COST_DER = StateCostDerived(ROBOT_STATE)
class StateCostSumTest(CostModelSumTestCase):
    """CostModelSum checks with a state cost."""
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    COST = crocoddyl.CostModelState(ROBOT_STATE)
class ControlCostTest(CostModelAbstractTestCase):
    """Control cost vs. its ControlCostDerived Python reference."""
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    COST = crocoddyl.CostModelControl(ROBOT_STATE)
    COST_DER = ControlCostDerived(ROBOT_STATE)
class ControlCostSumTest(CostModelSumTestCase):
    """CostModelSum checks with a control cost."""
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    COST = crocoddyl.CostModelControl(ROBOT_STATE)
class CoMPositionCostTest(CostModelAbstractTestCase):
    """CoM-position cost (random 3D reference) vs. its Python reference."""
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    cref = pinocchio.utils.rand(3)  # random CoM reference position
    COST = crocoddyl.CostModelCoMPosition(ROBOT_STATE, cref)
    COST_DER = CoMPositionCostDerived(ROBOT_STATE, cref=cref)
class CoMPositionCostSumTest(CostModelSumTestCase):
    """CostModelSum checks with a CoM-position cost."""
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    cref = pinocchio.utils.rand(3)  # random CoM reference position
    COST = crocoddyl.CostModelCoMPosition(ROBOT_STATE, cref)
class FramePlacementCostTest(CostModelAbstractTestCase):
    """Frame-placement cost (random SE3 target) vs. its Python reference."""
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    Mref = crocoddyl.FramePlacement(ROBOT_MODEL.getFrameId('rleg5_joint'), pinocchio.SE3.Random())
    COST = crocoddyl.CostModelFramePlacement(ROBOT_STATE, Mref)
    COST_DER = FramePlacementCostDerived(ROBOT_STATE, Mref=Mref)
class FramePlacementCostSumTest(CostModelSumTestCase):
    """CostModelSum checks with a frame-placement cost."""
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    Mref = crocoddyl.FramePlacement(ROBOT_MODEL.getFrameId('rleg5_joint'), pinocchio.SE3.Random())
    COST = crocoddyl.CostModelFramePlacement(ROBOT_STATE, Mref)
class FrameTranslationCostTest(CostModelAbstractTestCase):
    """Frame-translation cost (random 3D target) vs. its Python reference."""
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    xref = crocoddyl.FrameTranslation(ROBOT_MODEL.getFrameId('rleg5_joint'), pinocchio.utils.rand(3))
    COST = crocoddyl.CostModelFrameTranslation(ROBOT_STATE, xref)
    COST_DER = FrameTranslationCostDerived(ROBOT_STATE, xref=xref)
class FrameTranslationCostSumTest(CostModelSumTestCase):
    """CostModelSum checks with a frame-translation cost."""
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    xref = crocoddyl.FrameTranslation(ROBOT_MODEL.getFrameId('rleg5_joint'), pinocchio.utils.rand(3))
    COST = crocoddyl.CostModelFrameTranslation(ROBOT_STATE, xref)
class FrameVelocityCostTest(CostModelAbstractTestCase):
    """Frame-velocity cost (random motion target) vs. its Python reference."""
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    vref = crocoddyl.FrameMotion(ROBOT_MODEL.getFrameId('rleg5_joint'), pinocchio.Motion.Random())
    COST = crocoddyl.CostModelFrameVelocity(ROBOT_STATE, vref)
    COST_DER = FrameVelocityCostDerived(ROBOT_STATE, vref=vref)
class FrameVelocityCostSumTest(CostModelSumTestCase):
    """CostModelSum checks with a frame-velocity cost."""
    ROBOT_MODEL = pinocchio.buildSampleModelHumanoidRandom()
    ROBOT_STATE = crocoddyl.StateMultibody(ROBOT_MODEL)
    vref = crocoddyl.FrameMotion(ROBOT_MODEL.getFrameId('rleg5_joint'), pinocchio.Motion.Random())
    COST = crocoddyl.CostModelFrameVelocity(ROBOT_STATE, vref)
if __name__ == '__main__':
    # Collect every cost test case into one suite, run it, and exit
    # non-zero if any test failed.
    test_classes_to_run = [
        StateCostTest, StateCostSumTest, ControlCostTest, ControlCostSumTest, CoMPositionCostTest,
        CoMPositionCostSumTest, FramePlacementCostTest, FramePlacementCostSumTest, FrameTranslationCostTest,
        FrameTranslationCostSumTest, FrameVelocityCostTest, FrameVelocityCostSumTest
    ]
    loader = unittest.TestLoader()
    big_suite = unittest.TestSuite(
        loader.loadTestsFromTestCase(case_class) for case_class in test_classes_to_run
    )
    outcome = unittest.TextTestRunner().run(big_suite)
    sys.exit(0 if outcome.wasSuccessful() else 1)
| 46.312236
| 114
| 0.728954
| 1,325
| 10,976
| 5.896604
| 0.113208
| 0.063996
| 0.032254
| 0.043005
| 0.783566
| 0.783566
| 0.778958
| 0.778958
| 0.778958
| 0.777422
| 0
| 0.005431
| 0.16117
| 10,976
| 236
| 115
| 46.508475
| 0.843163
| 0.035259
| 0
| 0.547619
| 0
| 0
| 0.045661
| 0
| 0
| 0
| 0
| 0
| 0.184524
| 1
| 0.053571
| false
| 0
| 0.035714
| 0
| 0.511905
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
0eddbd89c033895dcde7241f2b420970cd2fb899
| 34,313
|
py
|
Python
|
integration_tests/src/main/python/arithmetic_ops_test.py
|
mengdong/spark-rapids
|
7aafb4c4b85e65374e2fb29852ed2c47c8495054
|
[
"Apache-2.0"
] | null | null | null |
integration_tests/src/main/python/arithmetic_ops_test.py
|
mengdong/spark-rapids
|
7aafb4c4b85e65374e2fb29852ed2c47c8495054
|
[
"Apache-2.0"
] | null | null | null |
integration_tests/src/main/python/arithmetic_ops_test.py
|
mengdong/spark-rapids
|
7aafb4c4b85e65374e2fb29852ed2c47c8495054
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020-2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from asserts import assert_gpu_and_cpu_are_equal_collect, assert_gpu_and_cpu_error, assert_gpu_fallback_collect
from data_gen import *
from marks import incompat, approximate_float, allow_non_gpu
from pyspark.sql.types import *
from pyspark.sql.types import IntegralType
from spark_session import with_cpu_session, with_gpu_session, with_spark_session, is_before_spark_311, is_before_spark_320
import pyspark.sql.functions as f
from pyspark.sql.utils import IllegalArgumentException
# No overflow gens here because we just focus on verifying the fallback to CPU when
# enabling ANSI mode. But overflows will fail the tests because CPU runs raise
# exceptions.
# Value ranges are kept small and positive so that the arithmetic in
# these tests stays within each column type's range (see comment above).
_no_overflow_multiply_gens = [
    ByteGen(min_val = 1, max_val = 10, special_cases=[]),
    ShortGen(min_val = 1, max_val = 100, special_cases=[]),
    IntegerGen(min_val = 1, max_val = 1000, special_cases=[]),
    LongGen(min_val = 1, max_val = 3000, special_cases=[])]
def _get_overflow_df(spark, data, data_type, expr):
    """Build a one-row DataFrame with a single column 'a' of *data_type*
    holding *data*, and apply *expr* to it (used for overflow scenarios).
    """
    # Use the context owned by the passed-in session. The original called
    # the global SparkContext.getOrCreate(), which resolves to the same
    # active context but relied on SparkContext being star-imported.
    return spark.createDataFrame(
        spark.sparkContext.parallelize([data]),
        StructType([StructField('a', data_type)])
    ).selectExpr(expr)
# Decimal column generators (defined in data_gen) used by tests that do
# not exercise maximum-precision decimals.
decimal_gens_not_max_prec = [decimal_gen_neg_scale, decimal_gen_scale_precision,
                             decimal_gen_same_scale_precision, decimal_gen_64bit]
@pytest.mark.parametrize('data_gen', numeric_gens + decimal_gens_not_max_prec, ids=idfn)
def test_addition(data_gen):
    # '+' over column/literal/null operand combinations must match the CPU.
    dtype = data_gen.data_type

    def build(spark):
        return binary_op_df(spark, data_gen).select(
            f.col('a') + f.lit(100).cast(dtype),
            f.lit(-12).cast(dtype) + f.col('b'),
            f.lit(None).cast(dtype) + f.col('a'),
            f.col('b') + f.lit(None).cast(dtype),
            f.col('a') + f.col('b'))

    assert_gpu_and_cpu_are_equal_collect(build, conf=allow_negative_scale_of_decimal_conf)
# If it will not overflow for multiply it is good for add too
@pytest.mark.parametrize('data_gen', _no_overflow_multiply_gens, ids=idfn)
def test_addition_ansi_no_overflow(data_gen):
    # Same '+' combinations, with ANSI mode on (inputs cannot overflow).
    dtype = data_gen.data_type

    def build(spark):
        return binary_op_df(spark, data_gen).select(
            f.col('a') + f.lit(100).cast(dtype),
            f.lit(-12).cast(dtype) + f.col('b'),
            f.lit(None).cast(dtype) + f.col('a'),
            f.col('b') + f.lit(None).cast(dtype),
            f.col('a') + f.col('b'))

    assert_gpu_and_cpu_are_equal_collect(build, conf={'spark.sql.ansi.enabled': 'true'})
@pytest.mark.parametrize('data_gen', numeric_gens + decimal_gens_not_max_prec, ids=idfn)
def test_subtraction(data_gen):
    # '-' over column/literal/null operand combinations must match the CPU.
    dtype = data_gen.data_type

    def build(spark):
        return binary_op_df(spark, data_gen).select(
            f.col('a') - f.lit(100).cast(dtype),
            f.lit(-12).cast(dtype) - f.col('b'),
            f.lit(None).cast(dtype) - f.col('a'),
            f.col('b') - f.lit(None).cast(dtype),
            f.col('a') - f.col('b'))

    assert_gpu_and_cpu_are_equal_collect(build, conf=allow_negative_scale_of_decimal_conf)
# If it will not overflow for multiply it is good for subtract too
@pytest.mark.parametrize('data_gen', _no_overflow_multiply_gens, ids=idfn)
def test_subtraction_ansi_no_overflow(data_gen):
    # Same '-' combinations, with ANSI mode on (inputs cannot overflow).
    dtype = data_gen.data_type

    def build(spark):
        return binary_op_df(spark, data_gen).select(
            f.col('a') - f.lit(100).cast(dtype),
            f.lit(-12).cast(dtype) - f.col('b'),
            f.lit(None).cast(dtype) - f.col('a'),
            f.col('b') - f.lit(None).cast(dtype),
            f.col('a') - f.col('b'))

    assert_gpu_and_cpu_are_equal_collect(build, conf={'spark.sql.ansi.enabled': 'true'})
@pytest.mark.parametrize('data_gen', numeric_gens +
        [decimal_gen_neg_scale, decimal_gen_scale_precision, decimal_gen_same_scale_precision, DecimalGen(8, 8)], ids=idfn)
def test_multiplication(data_gen):
    # '*' over column/literal/null operand combinations must match the CPU.
    dtype = data_gen.data_type

    def build(spark):
        return binary_op_df(spark, data_gen).select(
            f.col('a') * f.lit(100).cast(dtype),
            f.lit(-12).cast(dtype) * f.col('b'),
            f.lit(None).cast(dtype) * f.col('a'),
            f.col('b') * f.lit(None).cast(dtype),
            f.col('a') * f.col('b'))

    assert_gpu_and_cpu_are_equal_collect(build, conf=allow_negative_scale_of_decimal_conf)
@allow_non_gpu('ProjectExec', 'Alias', 'Multiply', 'Cast')
@pytest.mark.parametrize('data_gen', _no_overflow_multiply_gens, ids=idfn)
def test_multiplication_fallback_when_ansi_enabled(data_gen):
    # With ANSI mode on, Multiply must fall back to the CPU.
    def build(spark):
        return binary_op_df(spark, data_gen).select(f.col('a') * f.col('b'))

    assert_gpu_fallback_collect(
        build, 'Multiply', conf={'spark.sql.ansi.enabled': 'true'})
@pytest.mark.parametrize('data_gen', [float_gen, double_gen,
        decimal_gen_scale_precision], ids=idfn)
def test_multiplication_ansi_enabled(data_gen):
    # Float/double/decimal multiplication still runs on GPU under ANSI mode.
    dtype = data_gen.data_type

    def build(spark):
        return binary_op_df(spark, data_gen).select(
            f.col('a') * f.lit(100).cast(dtype),
            f.col('a') * f.col('b'))

    assert_gpu_and_cpu_are_equal_collect(build, conf={'spark.sql.ansi.enabled': 'true'})
@pytest.mark.parametrize('lhs', [DecimalGen(6, 5), DecimalGen(6, 4), DecimalGen(5, 4), DecimalGen(5, 3), DecimalGen(4, 2), DecimalGen(3, -2)], ids=idfn)
@pytest.mark.parametrize('rhs', [DecimalGen(6, 3)], ids=idfn)
def test_multiplication_mixed(lhs, rhs):
    # Products of columns with differing decimal precision/scale.
    def build(spark):
        return two_col_df(spark, lhs, rhs).select(f.col('a') * f.col('b'))

    assert_gpu_and_cpu_are_equal_collect(
        build, conf=allow_negative_scale_of_decimal_conf)
@pytest.mark.parametrize('data_gen', [double_gen, decimal_gen_neg_scale, DecimalGen(6, 3),
        DecimalGen(5, 5), DecimalGen(6, 0),
        pytest.param(DecimalGen(38, 21), marks=pytest.mark.xfail(reason="The precision is too large to be supported on the GPU", raises=IllegalArgumentException)),
        pytest.param(DecimalGen(21, 17), marks=pytest.mark.xfail(reason="The precision is too large to be supported on the GPU", raises=IllegalArgumentException))], ids=idfn)
def test_division(data_gen):
    # '/' over column/literal/null operand combinations must match the CPU.
    dtype = data_gen.data_type

    def build(spark):
        return binary_op_df(spark, data_gen).select(
            f.col('a') / f.lit(100).cast(dtype),
            f.lit(-12).cast(dtype) / f.col('b'),
            f.lit(None).cast(dtype) / f.col('a'),
            f.col('b') / f.lit(None).cast(dtype),
            f.col('a') / f.col('b'))

    assert_gpu_and_cpu_are_equal_collect(build, conf=allow_negative_scale_of_decimal_conf)
@pytest.mark.parametrize('lhs', [DecimalGen(5, 3), DecimalGen(4, 2), DecimalGen(1, -2)], ids=idfn)
@pytest.mark.parametrize('rhs', [DecimalGen(4, 1)], ids=idfn)
def test_division_mixed(lhs, rhs):
    # Quotients of columns with differing decimal precision/scale.
    def build(spark):
        return two_col_df(spark, lhs, rhs).select(f.col('a') / f.col('b'))

    assert_gpu_and_cpu_are_equal_collect(
        build, conf=allow_negative_scale_of_decimal_conf)
@pytest.mark.parametrize('data_gen', integral_gens + [decimal_gen_default, decimal_gen_scale_precision,
decimal_gen_same_scale_precision, decimal_gen_64bit], ids=idfn)
def test_int_division(data_gen):
string_type = to_cast_string(data_gen.data_type)
assert_gpu_and_cpu_are_equal_collect(
lambda spark : binary_op_df(spark, data_gen).selectExpr(
'a DIV cast(100 as {})'.format(string_type),
'cast(-12 as {}) DIV b'.format(string_type),
'cast(null as {}) DIV a'.format(string_type),
'b DIV cast(null as {})'.format(string_type),
'a DIV b'))
@pytest.mark.parametrize('lhs', [DecimalGen(6, 5), DecimalGen(5, 4), DecimalGen(3, -2)], ids=idfn)
@pytest.mark.parametrize('rhs', [DecimalGen(13, 2), DecimalGen(6, 3)], ids=idfn)
def test_int_division_mixed(lhs, rhs):
assert_gpu_and_cpu_are_equal_collect(
lambda spark : two_col_df(spark, lhs, rhs).selectExpr(
'a DIV b'),
conf=allow_negative_scale_of_decimal_conf)
# '%' modulo against scalar, null and column operands.
@pytest.mark.parametrize('data_gen', numeric_gens, ids=idfn)
def test_mod(data_gen):
    data_type = data_gen.data_type
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : binary_op_df(spark, data_gen).select(
            f.col('a') % f.lit(100).cast(data_type),
            f.lit(-12).cast(data_type) % f.col('b'),
            f.lit(None).cast(data_type) % f.col('a'),
            f.col('b') % f.lit(None).cast(data_type),
            f.col('a') % f.col('b')))

# pmod has no DataFrame column API here, so it is exercised via SQL.
@pytest.mark.parametrize('data_gen', numeric_gens, ids=idfn)
def test_pmod(data_gen):
    string_type = to_cast_string(data_gen.data_type)
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : binary_op_df(spark, data_gen).selectExpr(
            'pmod(a, cast(100 as {}))'.format(string_type),
            'pmod(cast(-12 as {}), b)'.format(string_type),
            'pmod(cast(null as {}), a)'.format(string_type),
            'pmod(b, cast(null as {}))'.format(string_type),
            'pmod(a, b)'))

@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_signum(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('signum(a)'))

@pytest.mark.parametrize('data_gen', numeric_gens + decimal_gens, ids=idfn)
def test_unary_minus(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('-a'),
        conf=allow_negative_scale_of_decimal_conf)

# Generators chosen so negation cannot overflow even with ANSI enabled.
@pytest.mark.parametrize('data_gen', _no_overflow_multiply_gens + [float_gen, double_gen] + decimal_gens, ids=idfn)
def test_unary_minus_ansi_no_overflow(data_gen):
    conf = copy_and_update(allow_negative_scale_of_decimal_conf, {'spark.sql.ansi.enabled': 'true'})
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('-a'),
        conf=conf)

# Negating the minimum of a signed integral type overflows; in ANSI mode both
# CPU and GPU must raise an ArithmeticException.
@pytest.mark.parametrize('data_type,value', [
    (LongType(), LONG_MIN),
    (IntegerType(), INT_MIN),
    (ShortType(), SHORT_MIN),
    (ByteType(), BYTE_MIN)], ids=idfn)
def test_unary_minus_ansi_overflow(data_type, value):
    conf = copy_and_update(allow_negative_scale_of_decimal_conf, {'spark.sql.ansi.enabled': 'true'})
    assert_gpu_and_cpu_error(
        df_fun=lambda spark: _get_overflow_df(spark, [value], data_type, '-a').collect(),
        conf=conf,
        error_message='ArithmeticException')
# This just ends up being a pass through. There is no good way to force
# a unary positive into a plan, because it gets optimized out, but this
# verifies that we can handle it.
@pytest.mark.parametrize('data_gen', numeric_gens + decimal_gens, ids=idfn)
def test_unary_positive(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('+a'),
        conf=allow_negative_scale_of_decimal_conf)

@pytest.mark.parametrize('data_gen', numeric_gens + decimal_gens, ids=idfn)
def test_abs(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('abs(a)'),
        conf=allow_negative_scale_of_decimal_conf)

# ANSI is ignored for abs prior to 3.2.0, but still okay to test it a little more.
@pytest.mark.parametrize('data_gen', _no_overflow_multiply_gens + [float_gen, double_gen] + decimal_gens, ids=idfn)
def test_abs_ansi_no_overflow(data_gen):
    conf = copy_and_update(allow_negative_scale_of_decimal_conf, {'spark.sql.ansi.enabled': 'true'})
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('abs(a)'),
        conf=conf)

# Only run this test for Spark v3.2.0 and later to verify abs will
# throw exceptions for overflow when ANSI mode is enabled.
@pytest.mark.skipif(is_before_spark_320(), reason='SPARK-33275')
@pytest.mark.parametrize('data_type,value', [
    (LongType(), LONG_MIN),
    (IntegerType(), INT_MIN),
    (ShortType(), SHORT_MIN),
    (ByteType(), BYTE_MIN)], ids=idfn)
def test_abs_ansi_overflow(data_type, value):
    conf = copy_and_update(allow_negative_scale_of_decimal_conf, {'spark.sql.ansi.enabled': 'true'})
    assert_gpu_and_cpu_error(
        df_fun=lambda spark: _get_overflow_df(spark, [value], data_type, 'abs(a)').collect(),
        conf=conf,
        error_message='ArithmeticException')

# NOTE(review): test_asin is defined a second time later in this file; under
# pytest collection the later definition shadows this one, so this copy never
# runs. One of the two definitions should be removed.
@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_asin(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('asin(a)'))
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_sqrt(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('sqrt(a)'))

@pytest.mark.parametrize('data_gen', double_n_long_gens + decimal_gens, ids=idfn)
def test_floor(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('floor(a)'),
        conf=allow_negative_scale_of_decimal_conf)

@pytest.mark.parametrize('data_gen', double_n_long_gens + decimal_gens, ids=idfn)
def test_ceil(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('ceil(a)'),
        conf=allow_negative_scale_of_decimal_conf)

@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_rint(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('rint(a)'))

# The three shift tests cover a scalar shift amount, scalar/null inputs on
# either side, and a full column-by-column shift, all via SQL.
@pytest.mark.parametrize('data_gen', int_n_long_gens, ids=idfn)
def test_shift_left(data_gen):
    string_type = to_cast_string(data_gen.data_type)
    assert_gpu_and_cpu_are_equal_collect(
        # The version of shiftLeft exposed to dataFrame does not take a column for num bits
        lambda spark : two_col_df(spark, data_gen, IntegerGen()).selectExpr(
            'shiftleft(a, cast(12 as INT))',
            'shiftleft(cast(-12 as {}), b)'.format(string_type),
            'shiftleft(cast(null as {}), b)'.format(string_type),
            'shiftleft(a, cast(null as INT))',
            'shiftleft(a, b)'))

@pytest.mark.parametrize('data_gen', int_n_long_gens, ids=idfn)
def test_shift_right(data_gen):
    string_type = to_cast_string(data_gen.data_type)
    assert_gpu_and_cpu_are_equal_collect(
        # The version of shiftRight exposed to dataFrame does not take a column for num bits
        lambda spark : two_col_df(spark, data_gen, IntegerGen()).selectExpr(
            'shiftright(a, cast(12 as INT))',
            'shiftright(cast(-12 as {}), b)'.format(string_type),
            'shiftright(cast(null as {}), b)'.format(string_type),
            'shiftright(a, cast(null as INT))',
            'shiftright(a, b)'))

@pytest.mark.parametrize('data_gen', int_n_long_gens, ids=idfn)
def test_shift_right_unsigned(data_gen):
    string_type = to_cast_string(data_gen.data_type)
    assert_gpu_and_cpu_are_equal_collect(
        # The version of shiftRightUnsigned exposed to dataFrame does not take a column for num bits
        lambda spark : two_col_df(spark, data_gen, IntegerGen()).selectExpr(
            'shiftrightunsigned(a, cast(12 as INT))',
            'shiftrightunsigned(cast(-12 as {}), b)'.format(string_type),
            'shiftrightunsigned(cast(null as {}), b)'.format(string_type),
            'shiftrightunsigned(a, cast(null as INT))',
            'shiftrightunsigned(a, b)'))
# bround/round at negative, zero-ish and large scales. Both marked @incompat
# and @approximate_float.
@incompat
@approximate_float
@pytest.mark.parametrize('data_gen', round_gens, ids=idfn)
def test_decimal_bround(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark: unary_op_df(spark, data_gen).selectExpr(
            'bround(a)',
            'bround(a, -1)',
            'bround(a, 1)',
            'bround(a, 2)',
            'bround(a, 10)'),
        conf=allow_negative_scale_of_decimal_conf)

@incompat
@approximate_float
@pytest.mark.parametrize('data_gen', round_gens, ids=idfn)
def test_decimal_round(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark: unary_op_df(spark, data_gen).selectExpr(
            'round(a)',
            'round(a, -1)',
            'round(a, 1)',
            'round(a, 2)',
            'round(a, 10)'),
        conf=allow_negative_scale_of_decimal_conf)

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_cbrt(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('cbrt(a)'))

# Bitwise and/or/xor against scalar, null and column operands.
@pytest.mark.parametrize('data_gen', integral_gens, ids=idfn)
def test_bit_and(data_gen):
    string_type = to_cast_string(data_gen.data_type)
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : binary_op_df(spark, data_gen).selectExpr(
            'a & cast(100 as {})'.format(string_type),
            'cast(-12 as {}) & b'.format(string_type),
            'cast(null as {}) & a'.format(string_type),
            'b & cast(null as {})'.format(string_type),
            'a & b'))

@pytest.mark.parametrize('data_gen', integral_gens, ids=idfn)
def test_bit_or(data_gen):
    string_type = to_cast_string(data_gen.data_type)
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : binary_op_df(spark, data_gen).selectExpr(
            'a | cast(100 as {})'.format(string_type),
            'cast(-12 as {}) | b'.format(string_type),
            'cast(null as {}) | a'.format(string_type),
            'b | cast(null as {})'.format(string_type),
            'a | b'))

@pytest.mark.parametrize('data_gen', integral_gens, ids=idfn)
def test_bit_xor(data_gen):
    string_type = to_cast_string(data_gen.data_type)
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : binary_op_df(spark, data_gen).selectExpr(
            'a ^ cast(100 as {})'.format(string_type),
            'cast(-12 as {}) ^ b'.format(string_type),
            'cast(null as {}) ^ a'.format(string_type),
            'b ^ cast(null as {})'.format(string_type),
            'a ^ b'))

@pytest.mark.parametrize('data_gen', integral_gens, ids=idfn)
def test_bit_not(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('~a'))
@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_radians(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('radians(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
@pytest.mark.xfail(reason='https://github.com/NVIDIA/spark-rapids/issues/109')
def test_degrees(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('degrees(a)'))

# Once https://github.com/NVIDIA/spark-rapids/issues/109 is fixed this can be removed
@approximate_float
@pytest.mark.parametrize('data_gen', [float_gen], ids=idfn)
def test_degrees_small(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('degrees(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_cos(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('cos(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_acos(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('acos(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_cosh(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('cosh(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_acosh(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('acosh(a)'))

# The default approximate is 1e-6 or 1 in a million
# in some cases we need to adjust this because the algorithm is different
@approximate_float(rel=1e-4, abs=1e-12)
# Because spark will overflow on large exponents drop to something well below
# what it fails at, note this is binary exponent, not base 10
@pytest.mark.parametrize('data_gen', [DoubleGen(min_exp=-20, max_exp=20)], ids=idfn)
def test_columnar_acosh_improved(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('acosh(a)'),
        {'spark.rapids.sql.improvedFloatOps.enabled': 'true'})

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_sin(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('sin(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_sinh(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('sinh(a)'))

# NOTE(review): duplicate definition — test_asin already appears earlier in
# this file; this later copy shadows the first. Remove one of the two.
@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_asin(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('asin(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_asinh(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('asinh(a)'))

# The default approximate is 1e-6 or 1 in a million
# in some cases we need to adjust this because the algorithm is different
@approximate_float(rel=1e-4, abs=1e-12)
# Because spark will overflow on large exponents drop to something well below
# what it fails at, note this is binary exponent, not base 10
@pytest.mark.parametrize('data_gen', [DoubleGen(min_exp=-20, max_exp=20)], ids=idfn)
def test_columnar_asinh_improved(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('asinh(a)'),
        {'spark.rapids.sql.improvedFloatOps.enabled': 'true'})
@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_tan(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('tan(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_atan(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('atan(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_atanh(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('atanh(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_tanh(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('tanh(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_cot(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('cot(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_exp(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('exp(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_expm1(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('expm1(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_log(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('log(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_log1p(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('log1p(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_log2(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('log2(a)'))

@approximate_float
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_log10(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : unary_op_df(spark, data_gen).selectExpr('log10(a)'))

# Two-argument log(base, value) across scalar/null/column combinations.
@approximate_float
@pytest.mark.xfail(reason='https://github.com/NVIDIA/spark-rapids/issues/89')
def test_logarithm():
    # For the 'b' field include a lot more values that we would expect customers to use as a part of a log
    data_gen = [('a', DoubleGen()),('b', DoubleGen().with_special_case(lambda rand: float(rand.randint(-16, 16)), weight=100.0))]
    string_type = 'DOUBLE'
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : gen_df(spark, data_gen).selectExpr(
            'log(a, cast(100 as {}))'.format(string_type),
            'log(cast(-12 as {}), b)'.format(string_type),
            'log(cast(null as {}), b)'.format(string_type),
            'log(a, cast(null as {}))'.format(string_type),
            'log(a, b)'))

@approximate_float
def test_scalar_pow():
    # For the 'b' field include a lot more values that we would expect customers to use as a part of a pow
    data_gen = [('a', DoubleGen()),('b', DoubleGen().with_special_case(lambda rand: float(rand.randint(-16, 16)), weight=100.0))]
    string_type = 'DOUBLE'
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : gen_df(spark, data_gen).selectExpr(
            'pow(a, cast(7 as {}))'.format(string_type),
            'pow(cast(-12 as {}), b)'.format(string_type),
            'pow(cast(null as {}), a)'.format(string_type),
            'pow(b, cast(null as {}))'.format(string_type)))

@approximate_float
@pytest.mark.xfail(reason='https://github.com/NVIDIA/spark-rapids/issues/89')
@pytest.mark.parametrize('data_gen', double_gens, ids=idfn)
def test_columnar_pow(data_gen):
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : binary_op_df(spark, data_gen).selectExpr('pow(a, b)'))
@pytest.mark.parametrize('data_gen', all_basic_gens + decimal_gens, ids=idfn)
def test_least(data_gen):
    """least() over 20 mostly-null columns plus one scalar operand."""
    num_cols = 20
    # NullGen can only ever produce nulls, so only force a non-null scalar
    # for the other generators.
    s1 = gen_scalar(data_gen, force_no_nulls=not isinstance(data_gen, NullGen))
    # we want lots of nulls
    gen = StructGen([('_c' + str(x), data_gen.copy_special_case(None, weight=100.0))
                     for x in range(0, num_cols)], nullable=False)
    command_args = [f.col('_c' + str(x)) for x in range(0, num_cols)]
    command_args.append(s1)
    # (Removed an unused local `data_type = data_gen.data_type` that was never read.)
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : gen_df(spark, gen).select(
            f.least(*command_args)), conf=allow_negative_scale_of_decimal_conf)
@pytest.mark.parametrize('data_gen', all_basic_gens + decimal_gens, ids=idfn)
def test_greatest(data_gen):
    """greatest() over 20 mostly-null columns plus one scalar operand."""
    num_cols = 20
    # NullGen can only ever produce nulls, so only force a non-null scalar
    # for the other generators.
    s1 = gen_scalar(data_gen, force_no_nulls=not isinstance(data_gen, NullGen))
    # we want lots of nulls
    gen = StructGen([('_c' + str(x), data_gen.copy_special_case(None, weight=100.0))
                     for x in range(0, num_cols)], nullable=False)
    command_args = [f.col('_c' + str(x)) for x in range(0, num_cols)]
    command_args.append(s1)
    # (Removed an unused local `data_type = data_gen.data_type` that was never read.)
    assert_gpu_and_cpu_are_equal_collect(
        lambda spark : gen_df(spark, gen).select(
            f.greatest(*command_args)), conf=allow_negative_scale_of_decimal_conf)
def _test_div_by_zero(ansi_mode, expr):
    # Shared driver: one row with b == 0 so every expr divides by zero.
    # NOTE(review): this conf value is a Python bool (ansi_mode == 'ansi')
    # while the rest of the file passes the strings 'true'/'false' — confirm
    # the conf plumbing normalizes both forms.
    ansi_conf = {'spark.sql.ansi.enabled': ansi_mode == 'ansi'}
    data_gen = lambda spark: two_col_df(spark, IntegerGen(), IntegerGen(min_val=0, max_val=0), length=1)
    div_by_zero_func = lambda spark: data_gen(spark).selectExpr(expr)
    if ansi_mode == 'ansi':
        # Note that Spark 3.2.0 throws SparkArithmeticException and < 3.2.0 throws java.lang.ArithmeticException
        # so just look for ArithmeticException
        assert_gpu_and_cpu_error(df_fun=lambda spark: div_by_zero_func(spark).collect(),
                                 conf=ansi_conf,
                                 error_message='ArithmeticException: divide by zero')
    else:
        assert_gpu_and_cpu_are_equal_collect(div_by_zero_func, ansi_conf)

@pytest.mark.parametrize('expr', ['1/0', 'a/0', 'a/b'])
@pytest.mark.xfail(condition=is_before_spark_311(), reason='https://github.com/apache/spark/pull/29882')
def test_div_by_zero_ansi(expr):
    _test_div_by_zero(ansi_mode='ansi', expr=expr)

@pytest.mark.parametrize('expr', ['1/0', 'a/0', 'a/b'])
def test_div_by_zero_nonansi(expr):
    _test_div_by_zero(ansi_mode='nonAnsi', expr=expr)

def _get_div_overflow_df(spark, expr):
    # Single row at the long-division overflow point: LONG_MIN DIV -1.
    return spark.createDataFrame(
        [(LONG_MIN, -1)],
        ['a', 'b']
    ).selectExpr(expr)

div_overflow_exprs = [
    'CAST(-9223372036854775808L as LONG) DIV -1',
    'a DIV CAST(-1 AS INT)',
    'a DIV b']

# Only run this test for Spark v3.2.0 and later to verify IntegralDivide will
# throw exceptions for overflow when ANSI mode is enabled.
@pytest.mark.skipif(is_before_spark_320(), reason='https://github.com/apache/spark/pull/32260')
@pytest.mark.parametrize('expr', div_overflow_exprs)
@pytest.mark.parametrize('ansi_enabled', ['false', 'true'])
def test_div_overflow_exception_when_ansi(expr, ansi_enabled):
    ansi_conf = {'spark.sql.ansi.enabled': ansi_enabled}
    if ansi_enabled == 'true':
        assert_gpu_and_cpu_error(
            df_fun=lambda spark: _get_div_overflow_df(spark, expr).collect(),
            conf=ansi_conf,
            error_message='java.lang.ArithmeticException: Overflow in integral divide')
    else:
        assert_gpu_and_cpu_are_equal_collect(
            func=lambda spark: _get_div_overflow_df(spark, expr),
            conf=ansi_conf)

# Only run this test before Spark v3.2.0 to verify IntegralDivide will NOT
# throw exceptions for overflow even ANSI mode is enabled.
@pytest.mark.skipif(not is_before_spark_320(), reason='https://github.com/apache/spark/pull/32260')
@pytest.mark.parametrize('expr', div_overflow_exprs)
@pytest.mark.parametrize('ansi_enabled', ['false', 'true'])
def test_div_overflow_no_exception_when_ansi(expr, ansi_enabled):
    assert_gpu_and_cpu_are_equal_collect(
        func=lambda spark: _get_div_overflow_df(spark, expr),
        conf={'spark.sql.ansi.enabled': ansi_enabled})
# (row data, column type, expression) triples sitting at each type's extreme
# so the expression overflows by one step (integral) or doubles past the
# representable range (float/double).
_data_type_expr_for_add_overflow = [
    ([127], ByteType(), 'a + 1Y'),
    ([-128], ByteType(), '-1Y + a'),
    ([32767], ShortType(), 'a + 1S'),
    ([-32768], ShortType(), '-1S + a'),
    ([2147483647], IntegerType(), 'a + 1'),
    ([-2147483648], IntegerType(), '-1 + a'),
    ([9223372036854775807], LongType(), 'a + 1L'),
    ([-9223372036854775808], LongType(), '-1L + a'),
    ([3.4028235E38], FloatType(), 'a + a'),
    ([-3.4028235E38], FloatType(), 'a + a'),
    ([1.7976931348623157E308], DoubleType(), 'a + a'),
    ([-1.7976931348623157E308], DoubleType(), 'a + a')]

# Integral overflow must raise in ANSI mode; float/double entries take the
# non-error path and are just compared CPU vs GPU.
@pytest.mark.parametrize('data,tp,expr', _data_type_expr_for_add_overflow)
def test_add_overflow_with_ansi_enabled(data, tp, expr):
    ansi_conf = {'spark.sql.ansi.enabled': 'true'}
    if isinstance(tp, IntegralType):
        assert_gpu_and_cpu_error(
            lambda spark: _get_overflow_df(spark, data, tp, expr).collect(),
            conf=ansi_conf,
            error_message='overflow')
    else:
        assert_gpu_and_cpu_are_equal_collect(
            func=lambda spark: _get_overflow_df(spark, data, tp, expr),
            conf=ansi_conf)

_data_type_expr_for_sub_overflow = [
    ([-128], ByteType(), 'a - 1Y'),
    ([-32768], ShortType(), 'a -1S'),
    ([-2147483648], IntegerType(), 'a - 1'),
    ([-9223372036854775808], LongType(), 'a - 1L'),
    ([-3.4028235E38], FloatType(), 'a - cast(1.0 as float)'),
    ([-1.7976931348623157E308], DoubleType(), 'a - 1.0')]

@pytest.mark.parametrize('data,tp,expr', _data_type_expr_for_sub_overflow)
def test_subtraction_overflow_with_ansi_enabled(data, tp, expr):
    ansi_conf = {'spark.sql.ansi.enabled': 'true'}
    if isinstance(tp, IntegralType):
        assert_gpu_and_cpu_error(
            lambda spark: _get_overflow_df(spark, data, tp, expr).collect(),
            conf=ansi_conf,
            error_message='overflow')
    else:
        assert_gpu_and_cpu_are_equal_collect(
            func=lambda spark: _get_overflow_df(spark, data, tp, expr),
            conf=ansi_conf)

# NOTE(review): _data_type_expr_for_add_overflow holds exactly 12 entries, so
# the [12:] slice below is empty and this test collects ZERO cases. It looks
# like decimal entries were meant to be appended to that list — confirm and
# either add them or remove this test.
@allow_non_gpu('ProjectExec', 'Alias', 'CheckOverflow', 'Add', 'PromotePrecision', 'Cast')
@pytest.mark.parametrize('data,tp,expr', _data_type_expr_for_add_overflow[12:])
@pytest.mark.parametrize('ansi_enabled', ['false','true'])
def test_add_overflow_fallback_for_decimal(data, tp, expr, ansi_enabled):
    # Spark will try to promote the precision (to 19) which GPU does not supported now.
    assert_gpu_fallback_collect(
        lambda spark: _get_overflow_df(spark, data, tp, expr),
        'ProjectExec',
        conf={'spark.sql.ansi.enabled': ansi_enabled})
| 46.243935
| 166
| 0.684026
| 4,991
| 34,313
| 4.387898
| 0.085354
| 0.065845
| 0.073836
| 0.052055
| 0.846119
| 0.818767
| 0.794612
| 0.741644
| 0.729863
| 0.716667
| 0
| 0.020291
| 0.185615
| 34,313
| 741
| 167
| 46.306343
| 0.76342
| 0.08233
| 0
| 0.556291
| 0
| 0
| 0.111387
| 0.017939
| 0
| 0
| 0
| 0
| 0.127483
| 1
| 0.125828
| false
| 0
| 0.014901
| 0.003311
| 0.14404
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0ee1f7aa39f75ab18856f6c42c0a00ff572a7ec1
| 211
|
py
|
Python
|
coingecko/models/enums/__init__.py
|
kkristof200/py_coingecko
|
ea289fc738c1b5c077a1ebcb422319527a2545ff
|
[
"MIT"
] | null | null | null |
coingecko/models/enums/__init__.py
|
kkristof200/py_coingecko
|
ea289fc738c1b5c077a1ebcb422319527a2545ff
|
[
"MIT"
] | null | null | null |
coingecko/models/enums/__init__.py
|
kkristof200/py_coingecko
|
ea289fc738c1b5c077a1ebcb422319527a2545ff
|
[
"MIT"
] | null | null | null |
from .sort_type import SortType
from .filter_price import FilterPrice
from .filter_24h_volume import Filter24hVolume
from .filter_24h_change import Filter24hChange
from .filter_market_cap import FilterMarketCap
| 35.166667
| 46
| 0.881517
| 28
| 211
| 6.357143
| 0.571429
| 0.224719
| 0.146067
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041885
| 0.094787
| 211
| 6
| 47
| 35.166667
| 0.890052
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0ee652b8edb748df2346216e410b160d9ac14ffe
| 1,941
|
py
|
Python
|
source/algorithms/bfs_vision.py
|
t3bol90/I2AI-Project-01
|
593df8eee47b204ce5686a6587e79fed404ec837
|
[
"MIT"
] | 1
|
2021-09-23T09:42:37.000Z
|
2021-09-23T09:42:37.000Z
|
source/algorithms/bfs_vision.py
|
t3bol90/I2AI-Project-01
|
593df8eee47b204ce5686a6587e79fed404ec837
|
[
"MIT"
] | null | null | null |
source/algorithms/bfs_vision.py
|
t3bol90/I2AI-Project-01
|
593df8eee47b204ce5686a6587e79fed404ec837
|
[
"MIT"
] | null | null | null |
from collections import deque
def get_vision(_map: list, start_pos: tuple, n_row: int, n_col: int):
    """BFS over the cells within distance 3 of start_pos.

    _map is indexed as _map[row][col] with n_row rows and n_col columns.
    Returns a tuple (ans, foods, monster): every visited cell in BFS order,
    the visited cells whose value is 2 (food), and those whose value is 3
    (monster).
    """
    q = deque()
    visited = [[False] * n_col for _ in range(n_row)]
    dist = [[0] * n_col for _ in range(n_row)]
    q.append(start_pos)
    visited[start_pos[0]][start_pos[1]] = True
    ans = []
    foods = []
    monster = []
    distx = [0, 0, 1, -1]
    disty = [1, -1, 0, 0]

    def is_valid(_x, _y):
        # Bug fix: the original ignored its parameters (it read the enclosing
        # loop's x/y) and swapped the bounds, comparing the row against n_col
        # and the column against n_row — which only worked for square maps.
        return 0 <= _x < n_row and 0 <= _y < n_col

    while q:
        top = q.popleft()
        ans.append(top)
        if _map[top[0]][top[1]] == 2:
            foods.append(top)
        elif _map[top[0]][top[1]] == 3:
            monster.append(top)
        for dx, dy in zip(distx, disty):
            x = top[0] + dx
            y = top[1] + dy
            if is_valid(x, y) and not visited[x][y]:
                dist[x][y] = dist[top[0]][top[1]] + 1
                # Cells farther than 3 steps are outside the vision radius.
                if dist[x][y] > 3:
                    continue
                q.append((x, y))
                visited[x][y] = True
    return ans, foods, monster
if __name__ == '__main__':
    # Ad-hoc manual check on a 15x15 grid. Cell values: 2 = food, 3 = monster
    # (per get_vision); presumably 1 marks walls, but NOTE(review):
    # get_vision does not treat any value as an obstacle — confirm whether
    # BFS should stop at 1-cells.
    start_pos = (7,7)
    # des_pos = (14,14)
    _map = [[1,1, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0],
            [1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0],
            [0, 2, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0],
            [0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 2, 0, 0, 0, 0],
            [1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0]]
    print(_map)
    print(get_vision(_map,start_pos,15,15))
| 36.622642
| 62
| 0.419887
| 407
| 1,941
| 1.911548
| 0.140049
| 0.424165
| 0.543702
| 0.622108
| 0.362468
| 0.33162
| 0.320051
| 0.264781
| 0.237789
| 0.226221
| 0
| 0.204925
| 0.351365
| 1,941
| 53
| 63
| 36.622642
| 0.413026
| 0.008758
| 0
| 0.12
| 0
| 0
| 0.00416
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.02
| 0.02
| 0.1
| 0.04
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
16533506dc8aa3695aa247b36ff0036f04cd1672
| 44
|
py
|
Python
|
mlpug/tensorflow/trainers/callbacks/basic.py
|
nuhame/ml-pug
|
ed73b337b90759bdb92a6c441c6da49d689a2cca
|
[
"Apache-2.0"
] | 4
|
2019-12-30T16:12:06.000Z
|
2022-03-25T15:25:49.000Z
|
mlpug/tensorflow/trainers/callbacks/basic.py
|
nuhame/mlpug
|
be9f7c55f7d6616af5303e9350cfd8092d55440b
|
[
"Apache-2.0"
] | null | null | null |
mlpug/tensorflow/trainers/callbacks/basic.py
|
nuhame/mlpug
|
be9f7c55f7d6616af5303e9350cfd8092d55440b
|
[
"Apache-2.0"
] | null | null | null |
from mlpug.trainers.callbacks.basic import *
| 44
| 44
| 0.840909
| 6
| 44
| 6.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 44
| 1
| 44
| 44
| 0.902439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
166f8dacb06ba501830b915906f9ddbfc3f3bf22
| 5,169
|
py
|
Python
|
porerefiner/protocols/porerefiner/rpc/porerefiner_grpc.py
|
CFSAN-Biostatistics/porerefiner
|
64f96498bd6c036cfac46def1d9d94362001e67c
|
[
"MIT"
] | 8
|
2019-10-10T20:05:18.000Z
|
2021-02-19T21:53:43.000Z
|
porerefiner/protocols/porerefiner/rpc/porerefiner_grpc.py
|
CFSAN-Biostatistics/porerefiner
|
64f96498bd6c036cfac46def1d9d94362001e67c
|
[
"MIT"
] | 2
|
2020-07-17T07:24:17.000Z
|
2021-02-19T22:28:12.000Z
|
porerefiner/protocols/porerefiner/rpc/porerefiner_grpc.py
|
CFSAN-Biostatistics/porerefiner
|
64f96498bd6c036cfac46def1d9d94362001e67c
|
[
"MIT"
] | 2
|
2019-10-01T15:45:59.000Z
|
2019-10-28T19:15:32.000Z
|
# Generated by the Protocol Buffers compiler. DO NOT EDIT!
# source: porerefiner/protocols/porerefiner/rpc/porerefiner.proto
# plugin: grpclib.plugin.main
import abc
import typing
import grpclib.const
import grpclib.client
if typing.TYPE_CHECKING:
import grpclib.server
import google.protobuf.timestamp_pb2
import google.protobuf.duration_pb2
import porerefiner.protocols.porerefiner.rpc.porerefiner_pb2
class PoreRefinerBase(abc.ABC):
    """Abstract grpclib service base for porerefiner.rpc.PoreRefiner.

    NOTE: this module is compiler-generated (the file header says
    "DO NOT EDIT") — change the .proto and regenerate rather than
    hand-editing. Subclasses implement each RPC as an async handler that
    reads the request from and writes the reply to `stream`.
    """

    @abc.abstractmethod
    async def GetRuns(self, stream: 'grpclib.server.Stream[porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunListRequest, porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunListResponse]') -> None:
        pass

    @abc.abstractmethod
    async def GetRunInfo(self, stream: 'grpclib.server.Stream[porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunRequest, porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunResponse]') -> None:
        pass

    @abc.abstractmethod
    async def AttachSheetToRun(self, stream: 'grpclib.server.Stream[porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunAttachRequest, porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.GenericResponse]') -> None:
        pass

    @abc.abstractmethod
    async def RsyncRunTo(self, stream: 'grpclib.server.Stream[porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunRsyncRequest, porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunRsyncResponse]') -> None:
        pass

    @abc.abstractmethod
    async def Tag(self, stream: 'grpclib.server.Stream[porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.TagRequest, porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.GenericResponse]') -> None:
        pass

    def __mapping__(self) -> typing.Dict[str, grpclib.const.Handler]:
        # Route table consumed by grpclib.server: RPC path -> (handler,
        # cardinality, request type, reply type). All five RPCs are UNARY_UNARY.
        return {
            '/porerefiner.rpc.PoreRefiner/GetRuns': grpclib.const.Handler(
                self.GetRuns,
                grpclib.const.Cardinality.UNARY_UNARY,
                porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunListRequest,
                porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunListResponse,
            ),
            '/porerefiner.rpc.PoreRefiner/GetRunInfo': grpclib.const.Handler(
                self.GetRunInfo,
                grpclib.const.Cardinality.UNARY_UNARY,
                porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunRequest,
                porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunResponse,
            ),
            '/porerefiner.rpc.PoreRefiner/AttachSheetToRun': grpclib.const.Handler(
                self.AttachSheetToRun,
                grpclib.const.Cardinality.UNARY_UNARY,
                porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunAttachRequest,
                porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.GenericResponse,
            ),
            '/porerefiner.rpc.PoreRefiner/RsyncRunTo': grpclib.const.Handler(
                self.RsyncRunTo,
                grpclib.const.Cardinality.UNARY_UNARY,
                porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunRsyncRequest,
                porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunRsyncResponse,
            ),
            '/porerefiner.rpc.PoreRefiner/Tag': grpclib.const.Handler(
                self.Tag,
                grpclib.const.Cardinality.UNARY_UNARY,
                porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.TagRequest,
                porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.GenericResponse,
            ),
        }
class PoreRefinerStub:
def __init__(self, channel: grpclib.client.Channel) -> None:
self.GetRuns = grpclib.client.UnaryUnaryMethod(
channel,
'/porerefiner.rpc.PoreRefiner/GetRuns',
porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunListRequest,
porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunListResponse,
)
self.GetRunInfo = grpclib.client.UnaryUnaryMethod(
channel,
'/porerefiner.rpc.PoreRefiner/GetRunInfo',
porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunRequest,
porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunResponse,
)
self.AttachSheetToRun = grpclib.client.UnaryUnaryMethod(
channel,
'/porerefiner.rpc.PoreRefiner/AttachSheetToRun',
porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunAttachRequest,
porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.GenericResponse,
)
self.RsyncRunTo = grpclib.client.UnaryUnaryMethod(
channel,
'/porerefiner.rpc.PoreRefiner/RsyncRunTo',
porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunRsyncRequest,
porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.RunRsyncResponse,
)
self.Tag = grpclib.client.UnaryUnaryMethod(
channel,
'/porerefiner.rpc.PoreRefiner/Tag',
porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.TagRequest,
porerefiner.protocols.porerefiner.rpc.porerefiner_pb2.GenericResponse,
)
| 48.308411
| 221
| 0.705552
| 468
| 5,169
| 7.692308
| 0.138889
| 0.163333
| 0.291667
| 0.302222
| 0.749722
| 0.737222
| 0.689444
| 0.604722
| 0.604722
| 0.600278
| 0
| 0.008027
| 0.204682
| 5,169
| 106
| 222
| 48.764151
| 0.867672
| 0.028632
| 0
| 0.5
| 1
| 0.055556
| 0.235998
| 0.235001
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022222
| false
| 0.055556
| 0.088889
| 0.011111
| 0.144444
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
16764404fe5a6cbca8a6ef6f7f6a430153aecf94
| 62
|
py
|
Python
|
absl/detailed/tests/a_test.py
|
jaximan/abseil-py
|
6493f8b2f5ce3887ce184348fb7cc4c0f8b20e44
|
[
"Apache-2.0"
] | null | null | null |
absl/detailed/tests/a_test.py
|
jaximan/abseil-py
|
6493f8b2f5ce3887ce184348fb7cc4c0f8b20e44
|
[
"Apache-2.0"
] | null | null | null |
absl/detailed/tests/a_test.py
|
jaximan/abseil-py
|
6493f8b2f5ce3887ce184348fb7cc4c0f8b20e44
|
[
"Apache-2.0"
] | null | null | null |
from absl.detailed import a
def test_a():
a.something()
| 10.333333
| 27
| 0.677419
| 10
| 62
| 4.1
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.209677
| 62
| 5
| 28
| 12.4
| 0.836735
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
169fadb91fcee25b503c57aad882d6dfea620f3c
| 4,375
|
py
|
Python
|
unittests/test_plugin.py
|
gcurtis79/OctoPrint-DiscordRemote
|
1af667648a5161633f5484f656783cd03858e798
|
[
"MIT"
] | null | null | null |
unittests/test_plugin.py
|
gcurtis79/OctoPrint-DiscordRemote
|
1af667648a5161633f5484f656783cd03858e798
|
[
"MIT"
] | null | null | null |
unittests/test_plugin.py
|
gcurtis79/OctoPrint-DiscordRemote
|
1af667648a5161633f5484f656783cd03858e798
|
[
"MIT"
] | null | null | null |
import mock
from octoprint_discordremote import DiscordRemotePlugin
from unittests.discordremotetestcase import DiscordRemoteTestCase
def mock_global_get_boolean(array):
return {
str(['webcam', 'flipV']): False,
str(['webcam', 'flipH']): False,
str(['webcam', 'rotate90']): False,
}[str(array)]
class TestCommand(DiscordRemoteTestCase):
def test_plugin_get_snapshot_http(self):
plugin = DiscordRemotePlugin()
plugin._settings = mock.Mock()
plugin._settings.global_get = mock.Mock()
plugin._settings.global_get.return_value = "http://ValidSnapshot"
plugin._settings.global_get_boolean = mock_global_get_boolean
plugin._logger = mock.Mock()
with open("unittests/test_pattern.png", "rb") as f:
file_data = f.read()
with mock.patch("requests.get") as mock_requests_get:
mock_requests_get.return_value = mock.Mock()
mock_requests_get.return_value.content = file_data
snapshots = plugin.get_snapshot()
self.assertIsNotNone(snapshots)
self.assertEqual(1, len(snapshots))
snapshot = snapshots[0]
self.assertEqual(2, len(snapshot))
self.assertEqual("snapshot.png", snapshot[0])
snapshot_data = snapshot[1].read()
self.assertEqual(len(file_data), len(snapshot_data))
self.assertEqual([file_data], [snapshot_data])
def test_plugin_get_snapshot_file(self):
plugin = DiscordRemotePlugin()
plugin._settings = mock.Mock()
plugin._settings.global_get = mock.Mock()
plugin._settings.global_get.return_value = "file://unittests/test_pattern.png"
plugin._settings.global_get_boolean = mock_global_get_boolean
plugin._logger = mock.Mock()
with open("unittests/test_pattern.png", "rb") as f:
file_data = f.read()
snapshots = plugin.get_snapshot()
self.assertIsNotNone(snapshots)
self.assertEqual(1, len(snapshots))
snapshot = snapshots[0]
self.assertEqual(2, len(snapshot))
self.assertEqual("snapshot.png", snapshot[0])
snapshot_data = snapshot[1].read()
self.assertEqual(len(file_data), len(snapshot_data))
self.assertEqual([file_data], [snapshot_data])
def test_plugin_get_printer_name(self):
plugin = DiscordRemotePlugin()
plugin._settings = mock.Mock()
plugin._settings.global_get = mock.Mock()
plugin._settings.global_get.return_value = "DiscordBot"
self.assertEqual(plugin._settings.global_get.return_value, plugin.get_printer_name())
plugin._settings.global_get.return_value = None
self.assertEqual("OctoPrint", plugin.get_printer_name())
def test_get_print_time_spent(self):
plugin = DiscordRemotePlugin()
plugin._printer = mock.Mock()
plugin._printer.get_current_data = mock.Mock()
plugin._printer.get_current_data.return_value = {}
self.assertEqual('Unknown', plugin.get_print_time_spent())
plugin._printer.get_current_data.return_value = {'progress': {}}
self.assertEqual('Unknown', plugin.get_print_time_spent())
plugin._printer.get_current_data.return_value = {'progress': {'printTime': None}}
self.assertEqual('Unknown', plugin.get_print_time_remaining())
plugin._printer.get_current_data.return_value = {'progress': {'printTime': 1234}}
self.assertEqual('20 minutes and 34 seconds', plugin.get_print_time_spent())
def test_get_print_time_remaining(self):
plugin = DiscordRemotePlugin()
plugin._printer = mock.Mock()
plugin._printer.get_current_data = mock.Mock()
plugin._printer.get_current_data.return_value = {}
self.assertEqual('Unknown', plugin.get_print_time_remaining())
plugin._printer.get_current_data.return_value = {'progress': {}}
self.assertEqual('Unknown', plugin.get_print_time_remaining())
plugin._printer.get_current_data.return_value = {'progress': {'printTimeLeft': None}}
self.assertEqual('Unknown', plugin.get_print_time_remaining())
plugin._printer.get_current_data.return_value = {'progress': {'printTimeLeft': 1234}}
self.assertEqual('20 minutes and 34 seconds', plugin.get_print_time_remaining())
| 39.772727
| 93
| 0.68
| 493
| 4,375
| 5.718053
| 0.141988
| 0.106421
| 0.049663
| 0.081589
| 0.824406
| 0.776162
| 0.75204
| 0.75204
| 0.75204
| 0.745654
| 0
| 0.008041
| 0.204114
| 4,375
| 109
| 94
| 40.137615
| 0.801551
| 0
| 0
| 0.62963
| 0
| 0
| 0.087832
| 0.019442
| 0
| 0
| 0
| 0
| 0.271605
| 1
| 0.074074
| false
| 0
| 0.037037
| 0.012346
| 0.135802
| 0.320988
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
16f07f84a87de7f3da377aa163b832c31b4c5917
| 13,249
|
py
|
Python
|
src/tests/control/test_settings.py
|
upsidedownpancake/pretix
|
bfeeb1028c9eccab4936029db7c38edd4cd5aad5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/tests/control/test_settings.py
|
upsidedownpancake/pretix
|
bfeeb1028c9eccab4936029db7c38edd4cd5aad5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/tests/control/test_settings.py
|
upsidedownpancake/pretix
|
bfeeb1028c9eccab4936029db7c38edd4cd5aad5
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2017-08-09T17:11:28.000Z
|
2017-08-09T17:11:28.000Z
|
import datetime
import json
import re
from tests.base import SoupTest
from pretix.base.models import Event, Organizer, Team, User
class MailSettingPreviewTest(SoupTest):
def setUp(self):
self.user = User.objects.create_user('dummy@dummy.dummy', 'dummy')
self.orga1 = Organizer.objects.create(name='CCC', slug='ccc')
self.orga2 = Organizer.objects.create(name='MRM', slug='mrm')
self.event1 = Event.objects.create(
organizer=self.orga1, name='30C3', slug='30c3',
date_from=datetime.datetime(2013, 12, 26, tzinfo=datetime.timezone.utc),
)
# event with locale
self.locale_event = Event.objects.create(
organizer=self.orga1, name={'en': '40C4-en', 'de-informal': '40C4-de'}, slug='40c4',
date_from=datetime.datetime(2013, 12, 26, tzinfo=datetime.timezone.utc),
)
self.locale_event.settings.locales = ['en', 'de-informal']
self.locale_event.save()
t = Team.objects.create(organizer=self.orga1, can_change_items=True, can_change_event_settings=True)
t.members.add(self.user)
t.limit_events.add(self.locale_event)
t.limit_events.add(self.event1)
self.client.login(email='dummy@dummy.dummy', password='dummy')
self.target = '/control/event/{}/{}/settings/email/preview'
def test_permission(self):
self.event2 = Event.objects.create(
organizer=self.orga2, name='30M3', slug='30m3',
date_from=datetime.datetime(2013, 12, 26, tzinfo=datetime.timezone.utc),
)
response = self.client.post(self.target.format(
self.orga2.slug, self.event2.slug), {
'test': 'test1'
})
assert response.status_code == 404
def test_missing_item_key(self):
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'dummy',
'mail_text_order_free_0': 'sss',
'mail_text_order_free_1': 'ttt'
})
assert response.status_code == 400
def test_invalid_item_field(self):
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'mail_text_order_free',
'mail_text_order_free_w': 'sss'
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_free'
assert len(res['msgs']) == 0
def test_invalid_language_index(self):
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'mail_text_order_free',
'mail_text_order_free_1': 'sss'
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_free'
assert len(res['msgs']) == 0
def test_no_item_field(self):
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'mail_text_order_free_0': 'sss'
})
assert response.status_code == 400
def test_only_en(self):
dummy_text = 'This is dummy sentence for test'
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'mail_text_order_free',
'mail_text_order_free_0': dummy_text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_free'
assert len(res['msgs']) == 1
assert res['msgs']['en'] == dummy_text
def test_multiple_languages(self):
dummy_text = 'This is dummy sentence for test'
response = self.client.post(self.target.format(
self.orga1.slug, self.locale_event.slug), {
'item': 'mail_text_order_free',
'mail_text_order_free_0': dummy_text,
'mail_text_order_free_2': dummy_text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_free'
assert len(res['msgs']) == 2
assert res['msgs']['en'] == dummy_text
assert res['msgs']['de-informal'] == dummy_text
def test_i18n_placeholders(self):
dummy_text = '{event}'
response = self.client.post(self.target.format(
self.orga1.slug, self.locale_event.slug), {
'item': 'mail_text_order_placed',
'mail_text_order_placed_0': dummy_text,
'mail_text_order_placed_2': dummy_text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_placed'
assert len(res['msgs']) == 2
assert res['msgs']['en'] == self.locale_event.name['en']
assert res['msgs']['de-informal'] == self.locale_event.name['de-informal']
def test_i18n_locale_order(self):
self.locale_event.settings.locales = ['de-informal', 'en']
self.locale_event.save()
dummy_text = '{event}'
response = self.client.post(self.target.format(
self.orga1.slug, self.locale_event.slug), {
'item': 'mail_text_order_placed',
'mail_text_order_placed_0': dummy_text,
'mail_text_order_placed_2': dummy_text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_placed'
assert len(res['msgs']) == 2
assert res['msgs']['de-informal'] == self.locale_event.name['de-informal']
assert res['msgs']['en'] == self.locale_event.name['en']
def test_mail_text_order_placed(self):
text = '{event}{total}{currency}{date}{payment_info}{url}{invoice_name}{invoice_company}'
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'mail_text_order_placed',
'mail_text_order_placed_0': text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_placed'
assert len(res['msgs']) == 1
assert re.match('.*{.*}.*', res['msgs']['en']) is None
def test_mail_text_order_paid(self):
text = '{event}{url}{invoice_name}{invoice_company}{payment_info}'
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'mail_text_order_paid',
'mail_text_order_paid_0': text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_paid'
assert len(res['msgs']) == 1
assert re.match('.*{.*}.*', res['msgs']['en']) is None
def test_mail_text_order_free(self):
text = '{event}{url}{invoice_name}{invoice_company}'
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'mail_text_order_free',
'mail_text_order_free_0': text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_free'
assert len(res['msgs']) == 1
assert re.match('.*{.*}.*', res['msgs']['en']) is None
def test_mail_text_resend_link(self):
text = '{event}{url}{invoice_name}{invoice_company}'
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'mail_text_resend_link',
'mail_text_resend_link_0': text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_resend_link'
assert len(res['msgs']) == 1
assert re.match('.*{.*}.*', res['msgs']['en']) is None
def test_mail_text_resend_all_links(self):
text = '{event}{orders}'
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'mail_text_resend_all_links',
'mail_text_resend_all_links_0': text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_resend_all_links'
assert len(res['msgs']) == 1
assert re.match('.*{.*}.*', res['msgs']['en']) is None
def test_mail_text_order_changed(self):
text = '{event}{url}{invoice_name}{invoice_company}'
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'mail_text_order_changed',
'mail_text_order_changed_0': text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_changed'
assert len(res['msgs']) == 1
assert re.match('.*{.*}.*', res['msgs']['en']) is None
def test_mail_text_order_expire_warning(self):
text = '{event}{url}{expire_date}{invoice_name}{invoice_company}'
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'mail_text_order_expire_warning',
'mail_text_order_expire_warning_0': text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_expire_warning'
assert len(res['msgs']) == 1
assert re.match('.*{.*}.*', res['msgs']['en']) is None
def test_mail_text_waiting_list(self):
text = '{event}{url}{product}{hours}{code}'
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'mail_text_waiting_list',
'mail_text_waiting_list_0': text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_waiting_list'
assert len(res['msgs']) == 1
assert re.match('.*{.*}.*', res['msgs']['en']) is None
def test_mail_text_order_canceled(self):
text = '{event}{code}{url}'
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'mail_text_order_canceled',
'mail_text_order_canceled_0': text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_canceled'
assert len(res['msgs']) == 1
assert re.match('.*{.*}.*', res['msgs']['en']) is None
def test_unsupported_placeholders(self):
text = '{event1}'
response = self.client.post(self.target.format(
self.orga1.slug, self.event1.slug), {
'item': 'mail_text_waiting_list',
'mail_text_waiting_list_0': text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_waiting_list'
assert len(res['msgs']) == 1
assert res['msgs']['en'] == text
def test_localised_date(self):
dummy_text = '{date}'
response = self.client.post(self.target.format(
self.orga1.slug, self.locale_event.slug), {
'item': 'mail_text_order_placed',
'mail_text_order_placed_0': dummy_text,
'mail_text_order_placed_2': dummy_text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_placed'
assert len(res['msgs']) == 2
assert res['msgs']['en'] != res['msgs']['de-informal']
def test_localised_expire_date(self):
dummy_text = '{expire_date}'
response = self.client.post(self.target.format(
self.orga1.slug, self.locale_event.slug), {
'item': 'mail_text_order_expire_warning',
'mail_text_order_expire_warning_0': dummy_text,
'mail_text_order_expire_warning_2': dummy_text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_expire_warning'
assert len(res['msgs']) == 2
assert res['msgs']['en'] != res['msgs']['de-informal']
def test_localised_payment_info(self):
dummy_text = '{payment_info}'
response = self.client.post(self.target.format(
self.orga1.slug, self.locale_event.slug), {
'item': 'mail_text_order_paid',
'mail_text_order_paid_0': dummy_text,
'mail_text_order_paid_2': dummy_text
})
assert response.status_code == 200
res = json.loads(response.content.decode())
assert res['item'] == 'mail_text_order_paid'
assert len(res['msgs']) == 2
assert res['msgs']['en'] != res['msgs']['de-informal']
| 42.194268
| 108
| 0.603517
| 1,636
| 13,249
| 4.646088
| 0.082518
| 0.078937
| 0.102618
| 0.067096
| 0.838574
| 0.793711
| 0.776214
| 0.756743
| 0.745823
| 0.733851
| 0
| 0.021154
| 0.250736
| 13,249
| 313
| 109
| 42.329073
| 0.744535
| 0.001283
| 0
| 0.673684
| 0
| 0
| 0.208541
| 0.124641
| 0
| 0
| 0
| 0
| 0.280702
| 1
| 0.080702
| false
| 0.003509
| 0.017544
| 0
| 0.101754
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bc4c0f9701ad9ef4468bd1f3ac3d19465ed6921c
| 33
|
py
|
Python
|
nepse/security/__init__.py
|
thenishantsapkota/nepse-api
|
d7b325d2eaecaae16e3859dd50012507dc3b3afa
|
[
"MIT"
] | 28
|
2021-05-30T15:45:21.000Z
|
2021-08-03T13:21:14.000Z
|
nepse/security/__init__.py
|
razesh66/nepse-api
|
e0aaef402b00b9c07b4e0a3e18ef5bc20beba5c3
|
[
"MIT"
] | 27
|
2021-06-03T09:35:28.000Z
|
2021-07-17T21:03:01.000Z
|
nepse/security/__init__.py
|
razesh66/nepse-api
|
e0aaef402b00b9c07b4e0a3e18ef5bc20beba5c3
|
[
"MIT"
] | 9
|
2021-06-02T09:18:24.000Z
|
2021-07-17T04:44:40.000Z
|
from .core import SecurityClient
| 16.5
| 32
| 0.848485
| 4
| 33
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 1
| 33
| 33
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bc4d14fa2c63d223cf4cced52755b6a82e1a75c3
| 142
|
py
|
Python
|
emiz/weather/__init__.py
|
theendsofinvention/emiz
|
98b210dd36053ce8062d54e8c501ca4715cd78b5
|
[
"MIT"
] | null | null | null |
emiz/weather/__init__.py
|
theendsofinvention/emiz
|
98b210dd36053ce8062d54e8c501ca4715cd78b5
|
[
"MIT"
] | 5
|
2020-03-24T16:34:15.000Z
|
2020-06-26T08:31:46.000Z
|
emiz/weather/__init__.py
|
theendsofinvention/emiz
|
98b210dd36053ce8062d54e8c501ca4715cd78b5
|
[
"MIT"
] | 1
|
2018-04-01T16:02:13.000Z
|
2018-04-01T16:02:13.000Z
|
# coding=utf-8
"""
Manage mission weather
"""
from . import avwx, custom_metar, mission_weather, mizfile, noaa, utils
from .avwx import AVWX
| 17.75
| 71
| 0.739437
| 20
| 142
| 5.15
| 0.7
| 0.271845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008264
| 0.147887
| 142
| 7
| 72
| 20.285714
| 0.842975
| 0.253521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bc6819a7895fbbe77e97c4fb8f796d97001a7bdd
| 173
|
py
|
Python
|
chapter03/3-2.py
|
alberthao/Python-Crash-Course-Homework
|
105ffb3075db075425d6cf0d08d9837ef0548866
|
[
"MIT"
] | 138
|
2019-07-26T13:42:31.000Z
|
2021-04-13T23:51:49.000Z
|
chapter03/3-2.py
|
alberthao/Python-Crash-Course-Homework
|
105ffb3075db075425d6cf0d08d9837ef0548866
|
[
"MIT"
] | 6
|
2019-07-20T13:47:47.000Z
|
2019-08-04T06:49:06.000Z
|
chapter03/3-2.py
|
alberthao/Python-Crash-Course-Homework
|
105ffb3075db075425d6cf0d08d9837ef0548866
|
[
"MIT"
] | 51
|
2019-07-26T09:46:28.000Z
|
2021-03-29T07:58:16.000Z
|
names = ['David','Herry','Army']
message1 = "hello " + names[0]
print(message1)
message1 = "hello " + names[1]
print(message1)
message1 = "hello " + names[2]
print(message1)
| 24.714286
| 32
| 0.66474
| 22
| 173
| 5.227273
| 0.454545
| 0.33913
| 0.469565
| 0.452174
| 0.53913
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06
| 0.132948
| 173
| 7
| 33
| 24.714286
| 0.706667
| 0
| 0
| 0.428571
| 0
| 0
| 0.183908
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
bc83150b8a0c79e32ceeedbbd51df8bb1b5e156c
| 7,964
|
py
|
Python
|
BCmetric/angleDistribution.py
|
visdata/UrbanMotionAnalysis
|
423357bb3d8369e174386174aa6209e32473836c
|
[
"Apache-2.0"
] | null | null | null |
BCmetric/angleDistribution.py
|
visdata/UrbanMotionAnalysis
|
423357bb3d8369e174386174aa6209e32473836c
|
[
"Apache-2.0"
] | null | null | null |
BCmetric/angleDistribution.py
|
visdata/UrbanMotionAnalysis
|
423357bb3d8369e174386174aa6209e32473836c
|
[
"Apache-2.0"
] | 1
|
2020-04-02T13:16:19.000Z
|
2020-04-02T13:16:19.000Z
|
__author__ = 'lenovo'
import numpy as np
import matplotlib.pyplot as pyplot
#countZero = [288.0, 102.0, 95.0, 251.0, 259.0, 355.0, 256.0, 259.0, 89.0, 106.0, 104.0, 242.0, 275.0, 274.0, 89.0, 92.0, 270.0, 254.0, 96.0, 86.0, 277.0, 259.0, 92.0, 273.0, 90.0, 91.0, 29.0, 288.0, 95.0, 80.0, 272.0, 87.0, 355.0, 282.0, 77.0, 82.0, 95.0, 80.0, 275.0, 283.0, 275.0, 79.0, 90.0, 286.0, 272.0, 81.0, 82.0, 94.0, 273.0, 112.0, 86.0]
#countZero = [308.0, 283.0, 141.0, 140.0, 120.0, 296.0, 122.0, 137.0, 297.0, 304.0, 312.0, 119.0, 122.0, 312.0, 127.0, 121.0, 120.0, 161.0, 332.0, 303.0, 302.0, 119.0, 184.0, 129.0, 308.0, 111.0, 129.0, 307.0, 128.0, 339.0, 299.0, 316.0, 271.0, 299.0, 209.0, 96.0, 301.0, 124.0, 140.0, 106.0, 125.0, 152.0, 300.0]
#countZero = [99.0, 96.0, 98.0, 99.0, 279.0, 287.0, 295.0, 287.0, 279.0, 279.0, 100.0, 102.0, 311.0, 277.0, 279.0, 96.0, 277.0, 287.0, 99.0, 99.0, 99.0, 280.0, 99.0, 99.0, 275.0, 279.0, 271.0, 279.0, 202.0, 97.0, 105.0, 96.0, 311.0, 277.0, 99.0, 96.0, 277.0, 99.0]
#countZero = [107.0, 90.0, 146.0, 326.0, 265.0, 235.0, 284.0, 182.0, 38.0, 302.0, 233.0, 357.0, 283.0, 256.0, 66.0, 276.0, 256.0, 97.0, 91.0, 90.0, 90.0, 146.0, 326.0, 235.0, 87.0, 103.0, 92.0, 182.0, 38.0, 108.0, 233.0, 357.0, 287.0, 356.0, 66.0, 276.0, 77.0]
#countZero = [182.0, 85.0, 193.0, 283.0, 174.0, 159.0, 347.0, 341.0, 60.0, 93.0, 154.0, 112.0, 183.0, 209.0, 84.0, 182.0, 296.0, 94.0, 76.0, 256.0, 127.0, 272.0, 348.0, 267.0, 174.0, 159.0, 347.0, 341.0, 60.0, 1.0, 21.0, 257.0, 112.0, 342.0, 15.0, 333.0, 349.0]
#countZero = [29.0, 271.0, 137.0, 75.0, 147.0, 147.0, 191.0, 254.0, 272.0, 144.0, 310.0, 288.0, 286.0, 91.0, 335.0, 147.0, 88.0, 90.0, 272.0, 271.0, 147.0, 356.0, 150.0, 147.0, 250.0, 191.0, 274.0, 90.0, 151.0, 310.0, 288.0, 41.0, 86.0, 154.0, 324.0]
#countZero = [134.0, 130.0, 143.0, 30.0, 201.0, 168.0, 137.0, 130.0, 150.0, 286.0, 142.0, 332.0, 142.0, 149.0, 260.0, 121.0, 14.0, 294.0, 313.0, 272.0, 30.0, 128.0, 168.0, 147.0, 306.0, 150.0, 310.0, 124.0, 262.0, 332.0, 296.0, 149.0, 260.0]
#countZero = [51.0, 216.0, 53.0, 97.0, 26.0, 2.0, 225.0, 190.0, 90.0, 270.0, 47.0, 80.0, 117.0, 51.0, 230.0, 230.0, 234.0, 50.0, 277.0, 56.0, 224.0, 53.0, 90.0, 270.0, 285.0, 80.0, 295.0]
#countZero = [330.0, 344.0, 241.0, 288.0, 264.0, 242.0, 66.0, 75.0, 66.0, 318.0, 316.0, 67.0, 333.0, 22.0, 264.0, 138.0, 288.0, 171.0, 242.0, 66.0, 241.0, 68.0, 248.0, 251.0, 198.0, 254.0, 279.0]
#countZero = [135.0, 33.0, 136.0, 90.0, 305.0, 317.0, 314.0, 334.0, 120.0, 216.0, 316.0, 132.0, 135.0, 61.0, 137.0, 135.0, 33.0, 309.0, 90.0, 305.0, 318.0, 58.0, 291.0, 318.0, 281.0, 132.0, 315.0]
countZero = [[207.0, 1], [169.0, 1], [25.0, 1], [291.0, 1], [224.0, 1], [119.0, 1], [276.0, 1], [324.0, 1], [157.0, 1], [164.0, 1], [292.0, 1], [318.0, 1], [305.0, 1], [282.0, 1], [241.0, 1], [291.0, 1], [329.0, 1], [210.0, 1], [49.0, 1], [282.0, 1], [1.0, 1], [324.0, 1], [117.0, 1]]
countZero = [[234.0, 1], [68.0, 1], [276.0, 1], [245.0, 1], [256.0, 1], [256.0, 1], [342.0, 1], [68.0, 1], [18.0, 1], [75.0, 1], [213.0, 1], [234.0, 1], [79.0, 1], [276.0, 1], [279.0, 1], [255.0, 1], [48.0, 1], [254.0, 1], [250.0, 1], [18.0, 1], [259.0, 1]]
countZero = [[299.0, 1], [225.0, 1], [85.0, 1], [296.0, 1], [69.0, 1], [287.0, 1], [288.0, 1], [59.0, 1], [268.0, 1], [6.0, 1], [108.0, 1], [299.0, 1], [225.0, 1], [85.0, 1], [301.0, 1], [284.0, 1], [288.0, 1], [219.0, 1], [105.0, 1], [102.0, 1], [264.0, 1]]
countZero = [[5.0, 1], [175.0, 1], [74.0, 1], [207.0, 1], [172.0, 1], [177.0, 1], [10.0, 1], [16.0, 1], [330.0, 1], [180.0, 1], [180.0, 1], [172.0, 1], [153.0, 1], [176.0, 1], [348.0, 1], [80.0, 1], [74.0, 1], [207.0, 1], [184.0, 1], [357.0, 1], [10.0, 1], [351.0, 1], [348.0, 1], [180.0, 1], [180.0, 1], [57.0, 1], [153.0, 1]]
countZero = [[256.0, 1], [293.0, 1], [322.0, 1], [270.0, 1], [63.0, 1], [233.0, 1], [30.0, 1], [0.0, 1], [210.0, 1], [331.0, 1], [211.0, 1], [238.0, 1], [108.0, 1], [117.0, 1], [108.0, 1], [108.0, 1], [83.0, 1], [78.0, 1], [60.0, 1], [231.0, 1], [121.0, 1], [173.0, 1], [223.0, 1], [6.0, 1], [138.0, 1], [256.0, 1], [123.0, 1], [291.0, 1], [132.0, 1], [237.0, 1], [305.0, 1], [270.0, 1], [223.0, 1], [327.0, 1], [283.0, 1], [270.0, 1], [225.0, 1], [296.0, 1], [73.0, 1], [145.0, 1], [225.0, 1], [286.0, 1], [137.0, 1], [136.0, 1], [117.0, 1], [13.0, 1], [293.0, 1], [322.0, 1], [270.0, 1], [63.0, 1], [233.0, 1], [30.0, 1], [0.0, 1], [281.0, 1], [283.0, 1], [287.0, 1], [33.0, 1], [108.0, 1], [0.0, 1], [51.0, 1], [199.0, 1], [29.0, 1], [60.0, 1], [309.0, 1], [347.0, 1], [43.0, 1], [6.0, 1], [138.0, 1], [256.0, 1], [123.0, 1], [291.0, 1], [132.0, 1], [237.0, 1], [103.0, 1], [270.0, 1], [30.0, 1], [272.0, 1], [327.0, 1], [283.0, 1], [270.0, 1], [225.0, 1], [296.0, 1], [129.0, 1], [30.0, 1], [280.0, 1], [261.0, 1], [287.0, 1], [137.0, 1]]
countZero = [[176.0, 1], [289.0, 1], [146.0, 1], [124.0, 1], [124.0, 1], [135.0, 1], [289.0, 1], [160.0, 1], [134.0, 1], [119.0, 1], [340.0, 1], [132.0, 1], [62.0, 1], [284.0, 1], [129.0, 1], [317.0, 1], [129.0, 1], [121.0, 1], [125.0, 1], [132.0, 1], [263.0, 1], [287.0, 1], [0.0, 1], [208.0, 1], [113.0, 1], [128.0, 1], [295.0, 1], [288.0, 1], [285.0, 1], [132.0, 1], [288.0, 1], [296.0, 1], [309.0, 1], [311.0, 1], [289.0, 1], [33.0, 1], [343.0, 1], [109.0, 1], [22.0, 1], [288.0, 1], [296.0, 1], [289.0, 1], [90.0, 1], [127.0, 1], [41.0, 1], [138.0, 1], [124.0, 1], [129.0, 1], [344.0, 1], [125.0, 1], [122.0, 1], [127.0, 1], [136.0, 1], [108.0, 1], [289.0, 1], [132.0, 1], [125.0, 1], [124.0, 1], [115.0, 1], [121.0, 1], [285.0, 1], [140.0, 1], [296.0, 1], [286.0, 1], [285.0, 1], [286.0, 1], [321.0, 1], [127.0, 1], [106.0, 1], [124.0, 1], [128.0, 1], [93.0, 1], [160.0, 1], [108.0, 1], [316.0, 1], [106.0, 1], [123.0, 1], [116.0, 1], [302.0, 1], [58.0, 1], [125.0, 1], [337.0, 1], [263.0, 1], [318.0, 1], [0.0, 1], [116.0, 1], [117.0, 1], [320.0, 1], [295.0, 1], [288.0, 1], [116.0, 1], [304.0, 1], [303.0, 1], [318.0, 1], [311.0, 1], [112.0, 1], [33.0, 1], [343.0, 1], [109.0, 1], [22.0, 1], [119.0, 1], [312.0, 1], [312.0, 1], [101.0, 1], [90.0, 1], [127.0, 1], [41.0, 1], [106.0, 1], [109.0, 1], [107.0, 1], [303.0, 1], [115.0, 1], [229.0, 1], [127.0, 1], [188.0, 1], [108.0, 1], [132.0, 1], [102.0, 1], [114.0, 1], [292.0, 1], [56.0, 1], [305.0, 1], [173.0, 1], [328.0, 1], [296.0, 1]]
countZero = [[235.0, 1], [169.0, 1], [90.0, 1], [171.0, 1], [328.0, 1], [351.0, 1], [317.0, 1], [191.0, 1], [181.0, 1], [201.0, 1], [346.0, 1], [218.0, 1], [144.0, 1], [73.0, 1], [322.0, 1], [13.0, 1], [335.0, 1], [59.0, 1], [259.0, 1], [331.0, 1], [51.0, 1], [305.0, 1], [191.0, 1], [267.0, 1], [242.0, 1], [292.0, 1], [7.0, 1], [286.0, 1], [121.0, 1], [13.0, 1], [43.0, 1], [50.0, 1], [8.0, 1], [237.0, 1], [235.0, 1], [225.0, 1], [168.0, 1], [13.0, 1], [89.0, 1], [179.0, 1], [186.0, 1], [10.0, 1], [315.0, 1], [297.0, 1], [81.0, 1], [251.0, 1], [80.0, 1], [100.0, 1], [221.0, 1], [161.0, 1], [187.0, 1], [74.0, 1], [160.0, 1], [186.0, 1], [61.0, 1], [243.0, 1], [290.0, 1], [252.0, 1], [189.0, 1], [103.0, 1], [106.0, 1], [294.0, 1], [270.0, 1], [331.0, 1], [259.0, 1], [195.0, 1], [350.0, 1], [179.0, 1], [169.0, 1], [90.0, 1], [257.0, 1], [328.0, 1], [34.0, 1], [126.0, 1], [283.0, 1], [147.0, 1], [222.0, 1], [57.0, 1], [5.0, 1], [81.0, 1], [259.0, 1], [50.0, 1], [282.0, 1], [242.0, 1], [285.0, 1], [7.0, 1], [100.0, 1], [13.0, 1], [30.0, 1], [35.0, 1], [252.0, 1], [214.0, 1], [237.0, 1], [153.0, 1], [225.0, 1], [168.0, 1], [13.0, 1], [245.0, 1], [147.0, 1], [186.0, 1], [10.0, 1], [315.0, 1], [244.0, 1], [31.0, 1], [286.0, 1], [10.0, 1], [213.0, 1], [236.0, 1], [55.0, 1], [161.0, 1], [52.0, 1], [3.0, 1], [88.0, 1], [221.0, 1], [243.0, 1], [3.0, 1], [252.0, 1], [189.0, 1], [103.0, 1], [337.0, 1], [0.0, 1], [270.0, 1], [331.0, 1]]
countZero = [elem[0] for elem in countZero]
pyplot.hist(countZero,30)
pyplot.xlabel('variance')
pyplot.xlim(0, 360)
pyplot.ylabel('Frenquency')
#pyplot.ylim(0, 30000)
pyplot.title('variance distribution from 7:00am to 10:00am')
pyplot.show()
| 221.222222
| 1,496
| 0.470116
| 2,059
| 7,964
| 1.816416
| 0.14813
| 0.229412
| 0.011765
| 0.012834
| 0.524332
| 0.157219
| 0.157219
| 0.150802
| 0.113904
| 0.106417
| 0
| 0.504042
| 0.161226
| 7,964
| 35
| 1,497
| 227.542857
| 0.055838
| 0.317052
| 0
| 0
| 0
| 0
| 0.01253
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.117647
| 0
| 0.117647
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bcac5875adb3a4bd019ea145e7890f98ab6444ed
| 40
|
py
|
Python
|
nametract/__init__.py
|
keddad/nametract
|
f86af89888d32c8d7a63b82e1e4b384a964ab7ec
|
[
"MIT"
] | null | null | null |
nametract/__init__.py
|
keddad/nametract
|
f86af89888d32c8d7a63b82e1e4b384a964ab7ec
|
[
"MIT"
] | null | null | null |
nametract/__init__.py
|
keddad/nametract
|
f86af89888d32c8d7a63b82e1e4b384a964ab7ec
|
[
"MIT"
] | null | null | null |
from nametract.extractor import extract
| 20
| 39
| 0.875
| 5
| 40
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 40
| 1
| 40
| 40
| 0.972222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bcd8e40e5ac74fbc5733e016a43c48cbbc564a01
| 173
|
py
|
Python
|
mmdet/utils/__init__.py
|
Joanna0123/QueryInst
|
6f75240610439e92bca5398054e3f7adc37bfd53
|
[
"MIT"
] | 326
|
2021-05-06T01:15:09.000Z
|
2022-03-30T14:52:13.000Z
|
mmdet/utils/__init__.py
|
Joanna0123/QueryInst
|
6f75240610439e92bca5398054e3f7adc37bfd53
|
[
"MIT"
] | 39
|
2021-05-20T02:54:40.000Z
|
2022-03-31T09:16:46.000Z
|
mmdet/utils/__init__.py
|
Joanna0123/QueryInst
|
6f75240610439e92bca5398054e3f7adc37bfd53
|
[
"MIT"
] | 46
|
2021-05-08T22:25:27.000Z
|
2022-03-28T08:11:51.000Z
|
from .collect_env import collect_env
from .logger import get_root_logger
from .optimizer import OptimizerHook
__all__ = ['get_root_logger', 'collect_env', 'OptimizerHook']
| 28.833333
| 61
| 0.815029
| 23
| 173
| 5.652174
| 0.434783
| 0.230769
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104046
| 173
| 5
| 62
| 34.6
| 0.83871
| 0
| 0
| 0
| 0
| 0
| 0.225434
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bcf3b282991580697e7dfbff8df2ebc9bc361520
| 383
|
py
|
Python
|
integration/tests/follow_redirect.py
|
youhavethewrong/hurl
|
91cc14882a5f1ef7fa86be09a9f5581cef680559
|
[
"Apache-2.0"
] | 1,013
|
2020-08-27T12:38:48.000Z
|
2022-03-31T23:12:23.000Z
|
integration/tests/follow_redirect.py
|
youhavethewrong/hurl
|
91cc14882a5f1ef7fa86be09a9f5581cef680559
|
[
"Apache-2.0"
] | 217
|
2020-08-31T11:18:10.000Z
|
2022-03-30T17:50:30.000Z
|
integration/tests/follow_redirect.py
|
youhavethewrong/hurl
|
91cc14882a5f1ef7fa86be09a9f5581cef680559
|
[
"Apache-2.0"
] | 54
|
2020-09-02T09:41:06.000Z
|
2022-03-19T15:33:05.000Z
|
from tests import app
from flask import redirect
@app.route('/follow-redirect')
def follow_redirect():
    """First hop of the redirect chain; bounces the client onward."""
    destination = 'http://localhost:8000/following-redirect'
    return redirect(destination)
@app.route('/following-redirect')
def following_redirect():
    """Middle hop of the redirect chain; forwards one more time."""
    destination = 'http://localhost:8000/followed-redirect'
    return redirect(destination)
@app.route('/followed-redirect')
def followed_redirect():
    """Terminal endpoint: the redirect chain ends here with a plain body."""
    body = 'Followed redirect!'
    return body
| 25.533333
| 63
| 0.751958
| 46
| 383
| 6.195652
| 0.326087
| 0.224561
| 0.168421
| 0.182456
| 0.273684
| 0.273684
| 0
| 0
| 0
| 0
| 0
| 0.023256
| 0.101828
| 383
| 14
| 64
| 27.357143
| 0.805233
| 0
| 0
| 0
| 0
| 0
| 0.391645
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| true
| 0
| 0.181818
| 0.272727
| 0.727273
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
d5f003ebb229b29add66f51cc189584b30ea03fb
| 186
|
py
|
Python
|
src/inventorysystem/admin.py
|
pankhuriagarwal0204/erp
|
0a127bae6def7eb4df1303f41135d053259df5e6
|
[
"MIT"
] | null | null | null |
src/inventorysystem/admin.py
|
pankhuriagarwal0204/erp
|
0a127bae6def7eb4df1303f41135d053259df5e6
|
[
"MIT"
] | null | null | null |
src/inventorysystem/admin.py
|
pankhuriagarwal0204/erp
|
0a127bae6def7eb4df1303f41135d053259df5e6
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
import models
# Register your models here.
# Expose the inventory models in the Django admin (registration order preserved).
for inventory_model in (models.ItemType, models.Item, models.Department):
    admin.site.register(inventory_model)
| 23.25
| 38
| 0.822581
| 26
| 186
| 5.884615
| 0.5
| 0.176471
| 0.333333
| 0.45098
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080645
| 186
| 8
| 38
| 23.25
| 0.894737
| 0.139785
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
911824a9ae47941738cabe110b06bc943bf23bfe
| 9,782
|
py
|
Python
|
src/rclcpp_bench/launch/sub40str64.launch.py
|
rclex/rclcpp_bench
|
b383950259dea58b0c6a4836048cb668ea03955e
|
[
"Apache-2.0"
] | 1
|
2022-01-27T00:26:05.000Z
|
2022-01-27T00:26:05.000Z
|
src/rclcpp_bench/launch/sub40str64.launch.py
|
rclex/rclcpp_bench
|
b383950259dea58b0c6a4836048cb668ea03955e
|
[
"Apache-2.0"
] | 1
|
2022-01-27T05:00:14.000Z
|
2022-01-27T05:00:14.000Z
|
src/rclcpp_bench/launch/sub40str64.launch.py
|
rclex/rclcpp_bench
|
b383950259dea58b0c6a4836048cb668ea03955e
|
[
"Apache-2.0"
] | null | null | null |
from launch import LaunchDescription
from launch_ros.actions import Node
# Number of subscriber nodes to spawn; kept as a string because it is spliced
# directly into the results directory path below.
num_sub = '40'
# Payload string length; also used as a results directory path component.
str_length = '64'
def generate_launch_description():
    """Build a LaunchDescription with 40 identical string-subscriber nodes.

    Each node runs the 'sub_string' executable from the 'rclcpp_bench'
    package under the name subNN (NN = 00..39) and writes its measurements
    to ./results/string/p1sN/<str_length>/<num_sub>/subNN.csv.

    :return: LaunchDescription containing one Node action per subscriber.
    """
    # One Node per subscriber index; replaces 40 copy-pasted, identical
    # blocks that differed only in the zero-padded index.
    return LaunchDescription([
        Node(
            package='rclcpp_bench',
            executable='sub_string',
            name='sub%02d' % i,
            arguments=['./results/string/p1sN/' +
                       str_length + '/' + num_sub + ('/sub%02d.csv' % i)]
        )
        for i in range(int(num_sub))
    ])
| 33.61512
| 65
| 0.435289
| 782
| 9,782
| 5.234015
| 0.085678
| 0.060103
| 0.166137
| 0.215001
| 0.869045
| 0.869045
| 0.869045
| 0.869045
| 0.457366
| 0
| 0
| 0.035714
| 0.41607
| 9,782
| 290
| 66
| 33.731034
| 0.680847
| 0
| 0
| 0.696864
| 1
| 0
| 0.245758
| 0.089961
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003484
| false
| 0
| 0.006969
| 0.003484
| 0.013937
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e6b959e01d71ccf5277000d0c3d2aa9ca982a7bf
| 164
|
py
|
Python
|
backend/app/utils/generate_string.py
|
wu-clan/fastapi_mysql_demo
|
efa3bdff73aa4d366da5f12dbb58c0221205e39b
|
[
"MIT"
] | null | null | null |
backend/app/utils/generate_string.py
|
wu-clan/fastapi_mysql_demo
|
efa3bdff73aa4d366da5f12dbb58c0221205e39b
|
[
"MIT"
] | null | null | null |
backend/app/utils/generate_string.py
|
wu-clan/fastapi_mysql_demo
|
efa3bdff73aa4d366da5f12dbb58c0221205e39b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import uuid


def get_uuid() -> str:
    """Generate a random (version 4) UUID.

    :return: str(uuid)
    """
    fresh_id = uuid.uuid4()
    return str(fresh_id)
| 12.615385
| 28
| 0.530488
| 21
| 164
| 4.095238
| 0.714286
| 0.209302
| 0.302326
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024793
| 0.262195
| 164
| 12
| 29
| 13.666667
| 0.68595
| 0.426829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
e6eb1269f016a1efd85b9a74e977c9205961c7be
| 483
|
py
|
Python
|
redirectHandler.py
|
divir94/News-Analytics
|
1fcf2b11e38f9b0c182160dfded7be44d5a7c8bb
|
[
"Apache-2.0"
] | null | null | null |
redirectHandler.py
|
divir94/News-Analytics
|
1fcf2b11e38f9b0c182160dfded7be44d5a7c8bb
|
[
"Apache-2.0"
] | null | null | null |
redirectHandler.py
|
divir94/News-Analytics
|
1fcf2b11e38f9b0c182160dfded7be44d5a7c8bb
|
[
"Apache-2.0"
] | null | null | null |
import urllib2
class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
    """Redirect handler that records the redirect status code on the response.

    The stock handler follows 301/302 transparently; this subclass tags the
    returned response object with the intermediate status code so callers can
    tell that a redirect occurred.
    """

    def http_error_301(self, req, fp, code, msg, headers):
        # Delegate to the base implementation, then remember the 301 code.
        response = urllib2.HTTPRedirectHandler.http_error_301(
            self, req, fp, code, msg, headers)
        response.status = code
        return response

    def http_error_302(self, req, fp, code, msg, headers):
        # Same treatment for temporary (302) redirects.
        response = urllib2.HTTPRedirectHandler.http_error_302(
            self, req, fp, code, msg, headers)
        response.status = code
        return response
| 40.25
| 94
| 0.699793
| 60
| 483
| 5.5
| 0.316667
| 0.109091
| 0.109091
| 0.157576
| 0.787879
| 0.787879
| 0.787879
| 0.787879
| 0.787879
| 0.787879
| 0
| 0.041885
| 0.20911
| 483
| 12
| 95
| 40.25
| 0.82199
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.1
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
e6f2c5a2dd472e6f36adcbb8b94df13c0850de3b
| 9,908
|
py
|
Python
|
anchore_engine/services/apiext/api/controllers/image_imports.py
|
rbrady/anchore-engine
|
5a5c492d76b5f911e60be422912fe8d42a74872b
|
[
"Apache-2.0"
] | 1,484
|
2017-09-11T19:08:42.000Z
|
2022-03-29T07:47:44.000Z
|
anchore_engine/services/apiext/api/controllers/image_imports.py
|
rbrady/anchore-engine
|
5a5c492d76b5f911e60be422912fe8d42a74872b
|
[
"Apache-2.0"
] | 913
|
2017-09-27T20:37:53.000Z
|
2022-03-29T17:21:28.000Z
|
anchore_engine/services/apiext/api/controllers/image_imports.py
|
rbrady/anchore-engine
|
5a5c492d76b5f911e60be422912fe8d42a74872b
|
[
"Apache-2.0"
] | 294
|
2017-09-12T16:54:03.000Z
|
2022-03-14T01:28:51.000Z
|
import datetime
from connexion import request
from anchore_engine.apis import exceptions as api_exceptions
from anchore_engine.apis.authorization import (
ActionBoundPermission,
RequestingAccountValue,
get_authorizer,
)
from anchore_engine.apis.context import ApiRequestContextProxy
from anchore_engine.clients.services import internal_client_for
from anchore_engine.clients.services.catalog import CatalogClient
from anchore_engine.common.helpers import make_response_error
from anchore_engine.subsys import logger
# Module-level authorizer used by the @authorizer.requires decorators below.
authorizer = get_authorizer()
# NOTE(review): the following two constants are not referenced in this view;
# presumably consumed by the catalog/object-store side — confirm before changing.
IMPORT_BUCKET = "image_content_imports"
MAX_UPLOAD_SIZE = 100 * 1024 * 1024  # 100 MB
# Import operations are presumably considered expired after this interval.
OPERATION_EXPIRATION_DELTA = datetime.timedelta(hours=24)
def _catalog_api_call(fn):
    """
    Execute *fn* against an account-scoped catalog client and translate the
    outcome into an API-layer (body, http_code) tuple.

    Consolidates the identical try/except boilerplate that every endpoint in
    this module previously repeated verbatim.

    :param fn: callable taking a CatalogClient, returning the response payload
    :return: (payload, 200) on success; (error_body, code) for AnchoreApiError;
             (error_body, 500) for any unexpected exception
    """
    try:
        client = internal_client_for(
            CatalogClient, userId=ApiRequestContextProxy.namespace()
        )
        resp = fn(client)
        return resp, 200
    except api_exceptions.AnchoreApiError as ex:
        return (
            make_response_error(ex, in_httpcode=ex.__response_code__),
            ex.__response_code__,
        )
    except Exception as ex:
        logger.exception("Unexpected error in api processing")
        return make_response_error(ex, in_httpcode=500), 500


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def create_operation():
    """
    POST /imports/images
    :return:
    """
    return _catalog_api_call(lambda client: client.create_image_import())


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_operations():
    """
    GET /imports/images
    :return:
    """
    return _catalog_api_call(lambda client: client.list_image_import_operations())


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_operation(operation_id):
    """
    GET /imports/images/{operation_id}
    :param operation_id:
    :return:
    """
    return _catalog_api_call(
        lambda client: client.get_image_import_operation(operation_id)
    )


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def invalidate_operation(operation_id):
    """
    DELETE /imports/images/{operation_id}
    :param operation_id:
    :return:
    """
    return _catalog_api_call(lambda client: client.cancel_image_import(operation_id))


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_import_packages(operation_id):
    """
    GET /imports/images/{operation_id}/packages
    :param operation_id:
    :return:
    """
    return _catalog_api_call(
        lambda client: client.list_import_content(operation_id, "packages")
    )


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_import_dockerfiles(operation_id):
    """
    GET /imports/images/{operation_id}/dockerfile
    :param operation_id:
    :return:
    """
    return _catalog_api_call(
        lambda client: client.list_import_content(operation_id, "dockerfile")
    )


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_import_image_manifests(operation_id):
    """
    GET /imports/images/{operation_id}/manifest
    :param operation_id:
    :return:
    """
    return _catalog_api_call(
        lambda client: client.list_import_content(operation_id, "manifest")
    )


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_import_parent_manifests(operation_id):
    """
    GET /imports/images/{operation_id}/parent_manifest
    :param operation_id:
    :return:
    """
    # NOTE: docstring route corrected from /manifest — this endpoint serves
    # the parent_manifest content type.
    return _catalog_api_call(
        lambda client: client.list_import_content(operation_id, "parent_manifest")
    )


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_import_image_configs(operation_id):
    """
    GET /imports/images/{operation_id}/image_config
    :param operation_id:
    :return:
    """
    return _catalog_api_call(
        lambda client: client.list_import_content(operation_id, "image_config")
    )


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def import_image_packages(operation_id):
    """
    POST /imports/images/{operation_id}/packages
    :param operation_id:
    :param sbom:
    :return:
    """
    return content_upload(operation_id, "packages", request)


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def import_image_dockerfile(operation_id):
    """
    POST /imports/images/{operation_id}/dockerfile
    :param operation_id:
    :param sbom:
    :return:
    """
    return content_upload(operation_id, "dockerfile", request)


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def import_image_manifest(operation_id):
    """
    POST /imports/images/{operation_id}/manifest
    :param operation_id:
    :return:
    """
    return content_upload(operation_id, "manifest", request)


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def import_image_parent_manifest(operation_id):
    """
    POST /imports/images/{operation_id}/parent_manifest
    :param operation_id:
    :return:
    """
    return content_upload(operation_id, "parent_manifest", request)


@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def import_image_config(operation_id):
    """
    POST /imports/images/{operation_id}/image_config
    :param operation_id:
    :return:
    """
    return content_upload(operation_id, "image_config", request)


def content_upload(operation_id, content_type, request):
    """
    Generic handler for multiple types of content uploads. Still operates at the API layer
    :param operation_id:
    :param content_type:
    :param request:
    :return:
    """
    return _catalog_api_call(
        lambda client: client.upload_image_import_content(
            operation_id, content_type, request.data
        )
    )
| 29.933535
| 90
| 0.690351
| 1,026
| 9,908
| 6.346004
| 0.096491
| 0.086162
| 0.05483
| 0.07065
| 0.84841
| 0.829673
| 0.818922
| 0.80341
| 0.770696
| 0.735371
| 0
| 0.013725
| 0.220529
| 9,908
| 330
| 91
| 30.024242
| 0.829341
| 0.116875
| 0
| 0.635897
| 0
| 0
| 0.055622
| 0.002501
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.153846
| 0
| 0.410256
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
fc4a2b83e9fc67db531f69f21d30847b8b7b0dd8
| 103
|
py
|
Python
|
stream2py/sources/__init__.py
|
sylvainbonnot/stream2py
|
6b8180eff17e73202ece9f252cda76ae3a98353f
|
[
"Apache-2.0"
] | 1
|
2020-03-31T18:48:45.000Z
|
2020-03-31T18:48:45.000Z
|
stream2py/sources/__init__.py
|
sylvainbonnot/stream2py
|
6b8180eff17e73202ece9f252cda76ae3a98353f
|
[
"Apache-2.0"
] | null | null | null |
stream2py/sources/__init__.py
|
sylvainbonnot/stream2py
|
6b8180eff17e73202ece9f252cda76ae3a98353f
|
[
"Apache-2.0"
] | null | null | null |
"""
Sources
=======
.. automodule:: stream2py.sources.audio
"""
from stream2py.sources import audio
| 10.3
| 39
| 0.669903
| 10
| 103
| 6.9
| 0.6
| 0.463768
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022472
| 0.135922
| 103
| 9
| 40
| 11.444444
| 0.752809
| 0.543689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
fc87b40add6ed0b9a6c53461503078e30928e2cd
| 8,957
|
py
|
Python
|
dit/pid/measures/idep.py
|
Ejjaffe/dit
|
c9d206f03d1de5a0a298b1d0ea9d79ea5e789ee1
|
[
"BSD-3-Clause"
] | 1
|
2020-03-13T10:30:11.000Z
|
2020-03-13T10:30:11.000Z
|
dit/pid/measures/idep.py
|
Ejjaffe/dit
|
c9d206f03d1de5a0a298b1d0ea9d79ea5e789ee1
|
[
"BSD-3-Clause"
] | null | null | null |
dit/pid/measures/idep.py
|
Ejjaffe/dit
|
c9d206f03d1de5a0a298b1d0ea9d79ea5e789ee1
|
[
"BSD-3-Clause"
] | null | null | null |
"""
The dependency-decomposition based unique measure partial information decomposition.
"""
from ...multivariate import coinformation
from ..pid import BaseUniquePID
from ...profiles import DependencyDecomposition
# Public API of this module. NOTE(review): the PID_dep_a/b/c variants defined
# below are not exported — presumably intentional given their stated caveats;
# confirm before adding them here.
__all__ = (
    'PID_dep',
    'PID_RA',
)
class PID_dep(BaseUniquePID):
    """
    The dependency partial information decomposition, as defined by James at al.
    """
    _name = "I_dep"

    @staticmethod
    def _measure(d, sources, target, maxiter=None):
        """
        Compute unique information as min(delta(I(sources : target))), where
        the minimum is taken over the dependency decomposition.

        Parameters
        ----------
        d : Distribution
            The distribution to compute i_dep for.
        sources : iterable of iterables
            The source variables.
        target : iterable
            The target variable.

        Returns
        -------
        idep : dict
            The value of I_dep for each individual source.
        """
        mutual_info = {'I': lambda dist: coinformation(dist, [[0, 1], [2]])}
        constraint_0 = frozenset((frozenset((0, 2)),))
        constraint_1 = frozenset((frozenset((1, 2)),))

        def smallest_delta(decomp, constraint):
            # Minimum change in I across all decomposition edges touching
            # the given source-target constraint.
            return min(decomp.delta(edge, 'I') for edge in decomp.edges(constraint))

        uniques = {}
        if len(sources) == 2:
            # Reorder so the joint distribution is ([0, 1], [2]) = (sources, target).
            joined = d.coalesce(sources + (target,))
            decomp = DependencyDecomposition(joined, measures=mutual_info, maxiter=maxiter)
            uniques[sources[0]] = smallest_delta(decomp, constraint_0)
            uniques[sources[1]] = smallest_delta(decomp, constraint_1)
        else:
            for source in sources:
                # Lump all remaining sources into a single joint variable.
                others = sum((i for i in sources if i != source), ())
                joined = d.coalesce([source, others, target])
                decomp = DependencyDecomposition(joined, measures=mutual_info, maxiter=maxiter)
                uniques[source] = smallest_delta(decomp, constraint_0)
        return uniques
class PID_RA(BaseUniquePID):
    """
    The "reproducibility analysis" partial information decomposition, derived
    from the work of Zwick.
    """
    _name = "I_RA"

    @staticmethod
    def _measure(d, sources, target, maxiter=None):
        """
        Compute unique information as the change in I[sources : target]
        along the edge that adds the source-target constraint.

        Parameters
        ----------
        d : Distribution
            The distribution to compute i_RA for.
        sources : iterable of iterables
            The source variables.
        target : iterable
            The target variable.

        Returns
        -------
        ira : dict
            The value of I_RA for each individual source.
        """
        mutual_info = {'I': lambda dist: coinformation(dist, [[0, 1], [2]])}
        constraint_0 = frozenset([frozenset((0, 2))])
        constraint_1 = frozenset([frozenset((1, 2))])
        # All pairwise dependencies among the three (joint) variables.
        all_pairs = frozenset([frozenset((0, 1))]) | constraint_0 | constraint_1

        def constraint_delta(decomp, constraint):
            # Change in I across the edge between the all-pairs node and the
            # node with this one constraint removed.
            return decomp.delta((all_pairs, all_pairs - constraint), 'I')

        uniques = {}
        if len(sources) == 2:
            joined = d.coalesce(sources + (target,))
            decomp = DependencyDecomposition(joined, measures=mutual_info, maxiter=maxiter)
            uniques[sources[0]] = constraint_delta(decomp, constraint_0)
            uniques[sources[1]] = constraint_delta(decomp, constraint_1)
        else:
            for source in sources:
                # Combine the remaining sources into one joint variable.
                others = sum((i for i in sources if i != source), ())
                joined = d.coalesce([source, others, target])
                decomp = DependencyDecomposition(joined, measures=mutual_info, maxiter=maxiter)
                uniques[source] = constraint_delta(decomp, constraint_0)
        return uniques
class PID_dep_a(BaseUniquePID):
    """
    The dependency partial information decomposition, as defined by James at al.
    Notes
    -----
    This alternative method behaves oddly with three or more sources.
    """
    _name = "I_dep_a"
    @staticmethod
    def _measure(d, sources, target): # pragma: no cover
        """
        This computes unique information as min(delta(I(sources : target))) where delta
        is taken over the dependency decomposition.
        Parameters
        ----------
        d : Distribution
            The distribution to compute i_dep_a for.
        sources : iterable of iterables
            The source variables.
        target : iterable
            The target variable.
        Returns
        -------
        idepa : dict
            The value of I_dep_a for each individual source.
        """
        # Assign every variable (each source plus the target) its positional
        # index in the coalesced distribution.
        var_to_index = {var: i for i, var in enumerate(sources + (target,))}
        d = d.coalesce(sorted(var_to_index.keys(), key=lambda k: var_to_index[k]))
        invars = [var_to_index[var] for var in sources]
        # NOTE(review): looks up (var,) for target elements but bare var for
        # sources — presumably target elements were keyed as 1-tuples; confirm,
        # since a mismatch would raise KeyError.
        outvar = [var_to_index[(var,)] for var in target]
        measure = {'I': lambda d: coinformation(d, [invars, outvar])}
        dd = DependencyDecomposition(d, list(var_to_index.values()), measures=measure)
        uniques = {}
        for source in sources:
            # Constrain the (source, target) pairwise dependency and take the
            # smallest change in I over decomposition edges touching it.
            constraint = frozenset((frozenset((var_to_index[source], var_to_index[target])),))
            u = min(dd.delta(edge, 'I') for edge in dd.edges(constraint))
            uniques[source] = u
        return uniques
class PID_dep_b(BaseUniquePID):
    """
    The reduced dependency partial information decomposition, as defined by James at al.
    Notes
    -----
    This decomposition is known to be inconsistent.
    """
    _name = "I_dep_b"
    @staticmethod
    def _measure(d, sources, target): # pragma: no cover
        """
        This computes unique information as min(delta(I(sources : target))) where delta
        is taken over a restricted dependency decomposition which never constrains dependencies
        among the sources.
        Parameters
        ----------
        d : Distribution
            The distribution to compute i_dep_b for.
        sources : iterable of iterables
            The source variables.
        target : iterable
            The target variable.
        Returns
        -------
        idepb : dict
            The value of I_dep_b for each individual source.
        """
        # Index every variable by its position in the coalesced distribution.
        var_to_index = {var: i for i, var in enumerate(sources + (target,))}
        target_index = var_to_index[target]
        d = d.coalesce(sorted(var_to_index.keys(), key=lambda k: var_to_index[k]))
        invars = [var_to_index[var] for var in sources]
        # NOTE(review): (var,) lookup for target elements vs bare var for
        # sources — confirm the keying convention (same pattern as PID_dep_a).
        outvar = [var_to_index[(var,)] for var in target]
        measure = {'I': lambda d: coinformation(d, [invars, outvar])}
        dd = DependencyDecomposition(d, list(var_to_index.values()), measures=measure)
        uniques = {}
        for source in sources:
            constraint = frozenset((frozenset((var_to_index[source], target_index)),))
            # Keep only edges whose every multi-variable constraint set strictly
            # contains the target, i.e. never constrain dependencies among the
            # sources alone ("BROJA-style" restriction).
            broja_style = lambda edge: all({target_index} < set(_) for _ in edge[0] if len(_) > 1)
            edge_set = (edge for edge in dd.edges(constraint) if broja_style(edge))
            u = min(dd.delta(edge, 'I') for edge in edge_set)
            uniques[source] = u
        return uniques
class PID_dep_c(BaseUniquePID):
    """
    The reduced dependency partial information decomposition, as defined by James at al.
    Notes
    -----
    This decomposition can result in subadditive redundancy.
    """
    _name = "I_dep_c"
    @staticmethod
    def _measure(d, sources, target): # pragma: no cover
        """
        This computes unique information as min(delta(I(sources : target))) where delta
        is taken over a restricted dependency decomposition which never constrains dependencies
        among the sources.
        Parameters
        ----------
        d : Distribution
            The distribution to compute i_dep_c for.
        sources : iterable of iterables
            The source variables.
        target : iterable
            The target variable.
        Returns
        -------
        idepc : dict
            The value of I_dep_c for each individual source.
        """
        # Index every variable by its position in the coalesced distribution.
        var_to_index = {var: i for i, var in enumerate(sources + (target,))}
        d = d.coalesce(sorted(var_to_index.keys(), key=lambda k: var_to_index[k]))
        invars = [var_to_index[var] for var in sources]
        # NOTE(review): (var,) lookup for target elements vs bare var for
        # sources — confirm the keying convention (same pattern as PID_dep_a).
        outvar = [var_to_index[(var,)] for var in target]
        measure = {'I': lambda d: coinformation(d, [invars, outvar])}
        dd = DependencyDecomposition(d, list(var_to_index.values()), measures=measure)
        uniques = {}
        for source in sources:
            constraint = frozenset((frozenset((var_to_index[source], var_to_index[target])),))
            # Restrict to edges whose source node still retains the full joint
            # sources constraint, so source-source dependencies stay intact.
            edge_set = (edge for edge in dd.edges(constraint) if tuple(invars) in edge[0])
            u = min(dd.delta(edge, 'I') for edge in edge_set)
            uniques[source] = u
        return uniques
| 35.12549
| 98
| 0.592609
| 1,073
| 8,957
| 4.809879
| 0.132339
| 0.023251
| 0.046503
| 0.02267
| 0.854873
| 0.851967
| 0.836078
| 0.823678
| 0.79752
| 0.760705
| 0
| 0.007665
| 0.300882
| 8,957
| 254
| 99
| 35.26378
| 0.816512
| 0.31975
| 0
| 0.679245
| 0
| 0
| 0.010722
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04717
| false
| 0
| 0.028302
| 0
| 0.216981
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5d9cd62c3e9905fa3afd1c46645654e484e03d92
| 153
|
py
|
Python
|
Gal2Renpy/TagSource/BgTag.py
|
dtysky/Gal2Renpy
|
59a70c5d336394155dedaf82d17bd99297f92d1a
|
[
"MIT"
] | 36
|
2015-04-19T05:03:10.000Z
|
2022-03-29T08:12:38.000Z
|
Gal2Renpy/TagSource/BgTag.py
|
dtysky/Gal2Renpy
|
59a70c5d336394155dedaf82d17bd99297f92d1a
|
[
"MIT"
] | 2
|
2016-05-05T07:24:09.000Z
|
2017-11-01T05:32:11.000Z
|
Gal2Renpy/TagSource/BgTag.py
|
dtysky/Gal2Renpy
|
59a70c5d336394155dedaf82d17bd99297f92d1a
|
[
"MIT"
] | 2
|
2016-12-01T02:12:33.000Z
|
2020-03-09T02:27:19.000Z
|
#coding:utf-8
#################################
#Copyright(c) 2014 dtysky
#################################
import G2R
class BgTag(G2R.TagSource):
    """Placeholder tag-source subclass; inherits all behavior from G2R.TagSource."""
    # NOTE(review): no overrides yet — presumably specialized later; confirm intent.
    pass
| 19.125
| 33
| 0.424837
| 14
| 153
| 4.642857
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049645
| 0.078431
| 153
| 8
| 34
| 19.125
| 0.411348
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 6
|
5db45fdb0d381fc01824f81972e603baafd9e243
| 9,551
|
py
|
Python
|
tests/unit/test_event_listener_factory.py
|
mpejcoch/aviso
|
250b5646220fae85725278b3ca80fed4e15a103a
|
[
"Apache-2.0"
] | 6
|
2021-02-03T17:55:05.000Z
|
2022-02-20T08:05:42.000Z
|
tests/unit/test_event_listener_factory.py
|
mpejcoch/aviso
|
250b5646220fae85725278b3ca80fed4e15a103a
|
[
"Apache-2.0"
] | 1
|
2021-04-26T14:42:39.000Z
|
2021-04-26T14:42:39.000Z
|
tests/unit/test_event_listener_factory.py
|
mpejcoch/aviso
|
250b5646220fae85725278b3ca80fed4e15a103a
|
[
"Apache-2.0"
] | 2
|
2021-02-09T15:07:41.000Z
|
2021-08-13T09:55:30.000Z
|
# (C) Copyright 1996- ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
import json
import os
import pytest
import yaml
from pyaviso import logger, user_config
from pyaviso.authentication import auth
from pyaviso.engine import engine_factory as ef
from pyaviso.event_listeners import event_listener_factory as elf
@pytest.fixture()
def conf() -> user_config.UserConfig:  # this automatically configures the logging
    """Load the test user configuration from tests/config.yaml."""
    c = user_config.UserConfig(conf_path="tests/config.yaml")
    return c
@pytest.fixture()
def schema(conf):
    """Load the test listener schema (JSON) used to validate listener files."""
    # renamed the file handle so it no longer shadows the fixture name
    with open("tests/unit/fixtures/listener_schema.json") as schema_file:
        return json.load(schema_file)
def test_empty_file(conf: user_config.UserConfig, schema):
    """An empty listener file must be rejected with an AssertionError."""
    logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
    # create the notification listener factory
    authenticator = auth.Auth.get_auth(conf)
    engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
    listener_factory = elf.EventListenerFactory(engine_factory, schema)
    # open the listener yaml file
    with open("tests/unit/fixtures/bad_listeners/empty.yaml", "r") as f:
        listeners_dict = yaml.safe_load(f.read())
    # parse it — pytest.raises replaces the old try/except, which passed
    # silently when no exception was raised (vacuous test)
    with pytest.raises(AssertionError) as exc:
        listener_factory.create_listeners(listeners_dict)
    assert exc.value.args[0] == "Event listeners definition cannot be empty"
def test_no_listeners(conf: user_config.UserConfig, schema):
    """A file without the 'listeners' root keyword must be rejected."""
    logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
    # create the notification listener factory
    authenticator = auth.Auth.get_auth(conf)
    engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
    listener_factory = elf.EventListenerFactory(engine_factory, schema)
    # open the listener yaml file
    with open("tests/unit/fixtures/bad_listeners/noListeners.yaml", "r") as f:
        listeners_dict = yaml.safe_load(f.read())
    # parse it — pytest.raises replaces the old vacuous try/except
    with pytest.raises(AssertionError) as exc:
        listener_factory.create_listeners(listeners_dict)
    assert exc.value.args[0] == "Event listeners definition must start with the keyword 'listeners'"
def test_bad_tree_structure(conf: user_config.UserConfig, schema):
    """A listener file with a malformed tree must be rejected."""
    logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
    # create the notification listener factory
    authenticator = auth.Auth.get_auth(conf)
    engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
    listener_factory = elf.EventListenerFactory(engine_factory, schema)
    # open the listener yaml file
    with open("tests/unit/fixtures/bad_listeners/badTree.yaml", "r") as f:
        listeners_dict = yaml.safe_load(f.read())
    # parse it — pytest.raises replaces the old vacuous try/except
    with pytest.raises(AssertionError) as exc:
        listener_factory.create_listeners(listeners_dict)
    assert exc.value.args[0] == "Wrong file structure"
def test_bad_attribute(conf: user_config.UserConfig, schema):
    """A listener file with a key not allowed by the schema must be rejected."""
    logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
    # create the notification listener factory
    authenticator = auth.Auth.get_auth(conf)
    engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
    listener_factory = elf.EventListenerFactory(engine_factory, schema)
    # open the listener yaml file
    with open("tests/unit/fixtures/bad_listeners/badAttribute.yaml", "r") as f:
        listeners_dict = yaml.safe_load(f.read())
    # parse it — pytest.raises replaces the old vacuous try/except
    with pytest.raises(AssertionError) as exc:
        listener_factory.create_listeners(listeners_dict)
    assert exc.value.args[0] == "Key day is not allowed"
def test_bad_format(conf: user_config.UserConfig, schema):
    """A listener value violating the schema format must raise a ValueError."""
    logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
    # create the notification listener factory
    authenticator = auth.Auth.get_auth(conf)
    engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
    listener_factory = elf.EventListenerFactory(engine_factory, schema)
    # open the listener yaml file
    with open("tests/unit/fixtures/bad_listeners/badFormat.yaml", "r") as f:
        listeners_dict = yaml.safe_load(f.read())
    # parse it — pytest.raises replaces the old vacuous try/except
    with pytest.raises(ValueError) as exc:
        listener_factory.create_listeners(listeners_dict)
    assert exc.value.args[0] == "Value 2021-01-01 is not valid for key date"
def test_no_trigger(conf: user_config.UserConfig, schema):
    """A listener without any trigger must be rejected."""
    logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
    # create the notification listener factory
    authenticator = auth.Auth.get_auth(conf)
    engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
    listener_factory = elf.EventListenerFactory(engine_factory, schema)
    # open the listener yaml file
    with open("tests/unit/fixtures/bad_listeners/noTrigger.yaml", "r") as f:
        listeners_dict = yaml.safe_load(f.read())
    # parse it — pytest.raises replaces the old vacuous try/except
    with pytest.raises(AssertionError) as exc:
        listener_factory.create_listeners(listeners_dict)
    assert exc.value.args[0] == "At least one trigger must be defined"
def test_bad_trigger_type(conf: user_config.UserConfig, schema):
    """An unknown trigger type must raise a KeyError."""
    logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
    # create the notification listener factory
    authenticator = auth.Auth.get_auth(conf)
    engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
    listener_factory = elf.EventListenerFactory(engine_factory, schema)
    # open the listener yaml file
    with open("tests/unit/fixtures/bad_listeners/badTriggerType.yaml", "r") as f:
        listeners_dict = yaml.safe_load(f.read())
    # parse it — pytest.raises replaces the old vacuous try/except
    with pytest.raises(KeyError) as exc:
        listener_factory.create_listeners(listeners_dict)
    assert exc.value.args[0] == "Trigger type logger not recognised"
def test_bad_trigger(conf: user_config.UserConfig, schema):
    """A trigger missing its mandatory 'type' field must be rejected."""
    logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
    # create the notification listener factory
    authenticator = auth.Auth.get_auth(conf)
    engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
    listener_factory = elf.EventListenerFactory(engine_factory, schema)
    # open the listener yaml file
    with open("tests/unit/fixtures/bad_listeners/badTrigger.yaml", "r") as f:
        listeners_dict = yaml.safe_load(f.read())
    # parse it — pytest.raises replaces the old vacuous try/except
    with pytest.raises(AssertionError) as exc:
        listener_factory.create_listeners(listeners_dict)
    assert exc.value.args[0] == "'type' is a mandatory field in trigger"
def test_single_listener_complete(conf: user_config.UserConfig, schema):
    """A complete listener file must yield exactly one listener with non-empty keys."""
    logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
    # create the notification listener factory
    authenticator = auth.Auth.get_auth(conf)
    engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
    listener_factory = elf.EventListenerFactory(engine_factory, schema)
    # open the listener yaml file
    with open("tests/unit/fixtures/good_listeners/complete_flight_listener.yaml", "r") as f:
        listeners_dict = yaml.safe_load(f.read())
    # parse it
    listeners: list = listener_factory.create_listeners(listeners_dict)
    assert len(listeners) == 1
    listener = listeners.pop()
    assert listener.keys is not None
    assert listener.keys[0]  # this will fail if the path was an empty string
def test_single_listener(conf: user_config.UserConfig, schema):
    """A basic listener file must produce one listener with the expected keys."""
    logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
    # create the notification listener factory
    authenticator = auth.Auth.get_auth(conf)
    engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
    listener_factory = elf.EventListenerFactory(engine_factory, schema)
    # open the listener yaml file
    with open("tests/unit/fixtures/good_listeners/basic_flight_listener.yaml", "r") as f:
        listeners_dict = yaml.safe_load(f.read())
    # parse it
    listeners: list = listener_factory.create_listeners(listeners_dict)
    assert len(listeners) == 1
    listener = listeners.pop()
    assert len(listener.keys) == 2
    assert listener.keys[0] == "/tmp/aviso/flight/Italy/"
def test_multiple_listener(conf: user_config.UserConfig, schema):
    """A file with several listeners must produce one listener object per entry."""
    logger.debug(os.environ.get("PYTEST_CURRENT_TEST").split(":")[-1].split(" ")[0])
    # create the notification listener factory
    authenticator = auth.Auth.get_auth(conf)
    engine_factory: ef.EngineFactory = ef.EngineFactory(conf.notification_engine, authenticator)
    listener_factory = elf.EventListenerFactory(engine_factory, schema)
    # open the listener yaml file
    with open("tests/unit/fixtures/good_listeners/multiple_flight_listeners.yaml", "r") as f:
        listeners_dict = yaml.safe_load(f.read())
    # parse it
    listeners: list = listener_factory.create_listeners(listeners_dict)
    assert len(listeners) == 3
    for listener in listeners:
        assert listener.keys is not None
        assert listener.keys[0]  # this will fail if the path was an empty string
| 45.265403
| 96
| 0.732593
| 1,249
| 9,551
| 5.43715
| 0.136109
| 0.075099
| 0.038286
| 0.042409
| 0.80695
| 0.803269
| 0.798851
| 0.798851
| 0.798851
| 0.798851
| 0
| 0.006604
| 0.159774
| 9,551
| 210
| 97
| 45.480952
| 0.839626
| 0.143755
| 0
| 0.642857
| 0
| 0
| 0.147793
| 0.079061
| 0
| 0
| 0
| 0
| 0.164286
| 1
| 0.092857
| false
| 0
| 0.057143
| 0
| 0.164286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5dbf7353ca61c90daeafd91f020e15b4a88ef555
| 38
|
py
|
Python
|
api/controller/algorithms/__init__.py
|
UST-QuAntiL/quantum-circuit-generator
|
2fe750cd4010f1aa8fbd8591ebad3c5817a2b8ad
|
[
"Apache-2.0"
] | null | null | null |
api/controller/algorithms/__init__.py
|
UST-QuAntiL/quantum-circuit-generator
|
2fe750cd4010f1aa8fbd8591ebad3c5817a2b8ad
|
[
"Apache-2.0"
] | 2
|
2021-11-11T08:54:23.000Z
|
2021-11-11T15:38:42.000Z
|
api/controller/algorithms/__init__.py
|
UST-QuAntiL/quantum-circuit-generator
|
2fe750cd4010f1aa8fbd8591ebad3c5817a2b8ad
|
[
"Apache-2.0"
] | null | null | null |
from .algorithm_controller import blp
| 19
| 37
| 0.868421
| 5
| 38
| 6.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 38
| 1
| 38
| 38
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5d11b568ea9beb89b75f89fc5ed7dd2a852210b9
| 36
|
py
|
Python
|
platform/core/polyaxon/polyaxon/__init__.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/polyaxon/polyaxon/__init__.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/polyaxon/polyaxon/__init__.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
from polyaxon.celery_api import app
| 18
| 35
| 0.861111
| 6
| 36
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5d181631f823061d4506b4f36aee0e68458d1b84
| 52
|
py
|
Python
|
weasel/__init__.py
|
QIB-Sheffield/Weasel
|
7e844c6dcb4fe0b671cd0249d2a30c7c4a39a9dd
|
[
"Apache-2.0"
] | 2
|
2021-12-29T12:49:57.000Z
|
2022-02-24T11:55:58.000Z
|
weasel/__init__.py
|
QIB-Sheffield/Weasel
|
7e844c6dcb4fe0b671cd0249d2a30c7c4a39a9dd
|
[
"Apache-2.0"
] | 2
|
2022-01-18T12:04:40.000Z
|
2022-01-18T12:05:50.000Z
|
weasel/__init__.py
|
QIB-Sheffield/Weasel
|
7e844c6dcb4fe0b671cd0249d2a30c7c4a39a9dd
|
[
"Apache-2.0"
] | null | null | null |
from weasel.main import *
from weasel.core import *
| 17.333333
| 25
| 0.769231
| 8
| 52
| 5
| 0.625
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 52
| 3
| 26
| 17.333333
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5d22f54b4a928449da594eb660541967bdb93a5c
| 4,644
|
py
|
Python
|
Validation/sift_ar_dpp.py
|
Shaalan31/LIWI
|
b4d615e0951b7c28c9258d0d7a8ff86c73c4ebe2
|
[
"MIT"
] | 2
|
2019-10-16T07:37:46.000Z
|
2020-10-04T10:31:02.000Z
|
Validation/sift_ar_dpp.py
|
Shaalan31/LIWI
|
b4d615e0951b7c28c9258d0d7a8ff86c73c4ebe2
|
[
"MIT"
] | 3
|
2021-03-19T00:22:56.000Z
|
2022-01-13T01:12:35.000Z
|
Validation/sift_ar_dpp.py
|
Shaalan31/LIWI
|
b4d615e0951b7c28c9258d0d7a8ff86c73c4ebe2
|
[
"MIT"
] | 2
|
2019-06-04T10:58:39.000Z
|
2019-06-06T18:52:01.000Z
|
#dpp means data pre processor for sift
from siftmodel.sift_model import *
import os, errno
from server.utils.utilities import *
def Samples_gen(start, end):
    """Generate SDS/SOH sample feature files for classes start..end (inclusive).

    For each .tif sample of each class, SIFT features are extracted at three
    (t, phi) settings and written as CSV under the SDS/SOH sample directories.
    Side-effect only (prints progress, writes files); returns None.
    """
    print('SAMPPLESS - ', start)
    t = [1, 50, 150]
    phi = [36, 72, 108]
    sift_model = SiftModel()
    sift_model.set_code_book('ar')
    start_class = int(start)
    num_classes = int(end)
    base_path = 'C:/Users/omars/Documents/Github/LIWI/Omar/KHATT/Samples/Class'
    base_samples_t = 'C:/Users/omars/Documents/Github/LIWI/Omar/ValidationArabic/Samples/SDS/'
    base_samples_phi = 'C:/Users/omars/Documents/Github/LIWI/Omar/ValidationArabic/Samples/SOH/'
    for class_number in range(start_class, num_classes + 1):
        print('Class' + str(class_number) + ':')
        # loop on training data for each writer
        for filename in glob.glob(base_path + str(class_number) + '/*.tif'):
            print(filename)
            image = cv2.imread(filename)
            name = Path(filename).name
            name = name.replace('tif', 'csv')
            print('Sift Model')
            for idx in range(0, 3):
                print(idx)
                SDS, SOH = sift_model.get_features(name, image=image, t=t[idx], phi=phi[idx])
                # zero-pad the parameter values to 3 chars for the directory names
                str_t = str(t[idx]).zfill(3)
                str_phi = str(phi[idx]).zfill(3)
                try:
                    os.makedirs(base_samples_t + str_t + "/Class" + str(class_number))
                except OSError as e:
                    # already-existing directory is fine; re-raise anything else
                    if e.errno != errno.EEXIST:
                        raise
                np.savetxt(base_samples_t + str_t + "/Class" + str(class_number) + '/' + name, SDS, delimiter=",")
                try:
                    os.makedirs(base_samples_phi + str_phi + "/Class" + str(class_number))
                except OSError as e:
                    if e.errno != errno.EEXIST:
                        raise
                np.savetxt(base_samples_phi + str_phi + "/Class" + str(class_number) + '/' + name, SOH, delimiter=",")
def Testcase_gen(start, num):
    """Generate SDS/SOH test-case feature files for classes start..num (inclusive).

    For each testing<N>.png image, SIFT features are extracted at three (t, phi)
    settings and written as CSV under the SDS/SOH test-case directories.
    Side-effect only (prints progress, writes files); returns None.
    """
    t = [1, 50, 150]
    phi = [36, 72, 108]
    sift_model = SiftModel()
    sift_model.set_code_book('ar')
    base_path = 'C:/Users/omars/Documents/Github/LIWI/Omar/KHATT/TestCases/'
    print('TESTCASES - ', start)
    start_class = int(start)
    num_classes = int(num)
    base_test_t = 'C:/Users/omars/Documents/Github/LIWI/Omar/ValidationArabic/TestCases/SDS/'
    base_test_phi = 'C:/Users/omars/Documents/Github/LIWI/Omar/ValidationArabic/TestCases/SOH/'
    for class_number in range(start_class, num_classes + 1):
        print('Class' + str(class_number) + ':')
        # loop on training data for each writer
        for filename in glob.glob(base_path + 'testing' + str(class_number) + '.png'):
            print(filename)
            image = cv2.imread(filename)
            name = Path(filename).name
            name = name.replace('png', 'csv')
            print(name)
            print('Sift Model')
            for idx in range(0, 3):
                print(idx)
                SDS, SOH = sift_model.get_features(name, image=image, t=t[idx], phi=phi[idx])
                # zero-pad the parameter values to 3 chars for the directory names
                str_t = str(t[idx]).zfill(3)
                str_phi = str(phi[idx]).zfill(3)
                try:
                    os.makedirs(base_test_t + str_t)
                except OSError as e:
                    # already-existing directory is fine; re-raise anything else
                    if e.errno != errno.EEXIST:
                        raise
                try:
                    os.makedirs(base_test_phi + str_phi)
                except OSError as e:
                    if e.errno != errno.EEXIST:
                        raise
                np.savetxt(base_test_t + str_t + '/' + name, SDS, delimiter=",")
                np.savetxt(base_test_phi + str_phi + '/' + name, SOH, delimiter=",")
# Generate test cases and samples for classes 170..349 in batches of 20.
# NOTE(review): this runs at import time; consider guarding with
# `if __name__ == "__main__":` — left as-is to preserve current behavior.
for beg in range(170, 350, 20):
    Testcase_gen(beg, 20 + beg)
    Samples_gen(beg, 20 + beg)
| 28.490798
| 108
| 0.544789
| 576
| 4,644
| 4.210069
| 0.177083
| 0.023093
| 0.057732
| 0.065979
| 0.820619
| 0.784742
| 0.784742
| 0.759175
| 0.759175
| 0.681649
| 0
| 0.017397
| 0.331611
| 4,644
| 163
| 109
| 28.490798
| 0.763853
| 0.051034
| 0
| 0.6875
| 0
| 0
| 0.153077
| 0.124688
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020833
| false
| 0
| 0.03125
| 0
| 0.052083
| 0.114583
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5d3a123771289cfa8959c17afdfa9177199f1afd
| 266
|
py
|
Python
|
stable_baselines/simple_ddpg/__init__.py
|
spitis/stable-baselines
|
f62cd6698b2427c0fb5ac452b9059a59b22cde81
|
[
"MIT"
] | null | null | null |
stable_baselines/simple_ddpg/__init__.py
|
spitis/stable-baselines
|
f62cd6698b2427c0fb5ac452b9059a59b22cde81
|
[
"MIT"
] | 2
|
2018-11-14T22:53:17.000Z
|
2018-11-15T00:06:40.000Z
|
stable_baselines/simple_ddpg/__init__.py
|
spitis/stable-baselines
|
f62cd6698b2427c0fb5ac452b9059a59b22cde81
|
[
"MIT"
] | null | null | null |
from stable_baselines.common.policies import MlpPolicy, CnnPolicy, LnMlpPolicy, LnCnnPolicy
from stable_baselines.simple_ddpg.simple_ddpg import SimpleDDPG, make_feedforward_extractor, identity_extractor
from stable_baselines.common.replay_buffer import ReplayBuffer
| 88.666667
| 111
| 0.898496
| 32
| 266
| 7.1875
| 0.625
| 0.130435
| 0.247826
| 0.217391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06015
| 266
| 3
| 112
| 88.666667
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5d475cf707b1d81452f17381078c9cf074c72aa9
| 33
|
py
|
Python
|
extensions/repost/__init__.py
|
nalabelle/discord-bot
|
33c140e6bd3e2ba41d2368dc1918913c6011ab07
|
[
"MIT"
] | 1
|
2017-03-22T19:13:09.000Z
|
2017-03-22T19:13:09.000Z
|
extensions/giphy/__init__.py
|
nalabelle/discord-bot
|
33c140e6bd3e2ba41d2368dc1918913c6011ab07
|
[
"MIT"
] | 1
|
2021-11-13T04:17:21.000Z
|
2021-11-13T04:17:21.000Z
|
extensions/giphy/__init__.py
|
nalabelle/discord-bot
|
33c140e6bd3e2ba41d2368dc1918913c6011ab07
|
[
"MIT"
] | 3
|
2017-03-22T19:13:34.000Z
|
2019-03-14T21:11:52.000Z
|
from .cmd import setup, teardown
| 16.5
| 32
| 0.787879
| 5
| 33
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 33
| 1
| 33
| 33
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5d4d70c8f025d23b236b8ea88eb5c60b3a2079c0
| 129
|
py
|
Python
|
ufdl-core-app/src/ufdl/core_app/filter/__init__.py
|
waikato-ufdl/ufdl-backend
|
776fc906c61eba6c2f2e6324758e7b8a323e30d7
|
[
"Apache-2.0"
] | null | null | null |
ufdl-core-app/src/ufdl/core_app/filter/__init__.py
|
waikato-ufdl/ufdl-backend
|
776fc906c61eba6c2f2e6324758e7b8a323e30d7
|
[
"Apache-2.0"
] | 85
|
2020-07-24T00:04:28.000Z
|
2022-02-10T10:35:15.000Z
|
ufdl-core-app/src/ufdl/core_app/filter/__init__.py
|
waikato-ufdl/ufdl-backend
|
776fc906c61eba6c2f2e6324758e7b8a323e30d7
|
[
"Apache-2.0"
] | null | null | null |
"""
Package for functionality that manages filtering of list
requests.
"""
from ._filter_list_request import filter_list_request
| 21.5
| 56
| 0.821705
| 17
| 129
| 5.941176
| 0.764706
| 0.19802
| 0.336634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 129
| 5
| 57
| 25.8
| 0.885965
| 0.511628
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5d7110b01979585239f3f76426fd0854f006213a
| 21
|
py
|
Python
|
ryu/app/network_awareness/__init__.py
|
hiArvin/ryu
|
b568088f8fe1d2334d9773f6ddaac8674f2a0f61
|
[
"Apache-2.0"
] | 269
|
2015-03-08T11:32:45.000Z
|
2022-03-30T11:18:16.000Z
|
ryu/app/network_awareness/__init__.py
|
leeshy-tech/ryu
|
a8e5aff03fe3609243a25eaa7aeb9e01d1c69643
|
[
"Apache-2.0"
] | null | null | null |
ryu/app/network_awareness/__init__.py
|
leeshy-tech/ryu
|
a8e5aff03fe3609243a25eaa7aeb9e01d1c69643
|
[
"Apache-2.0"
] | 205
|
2015-01-13T04:52:25.000Z
|
2022-03-30T13:37:33.000Z
|
"For loading module"
| 10.5
| 20
| 0.761905
| 3
| 21
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 21
| 1
| 21
| 21
| 0.888889
| 0.857143
| 0
| 0
| 0
| 0
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5386515411c3b672d18a711e4fd6a0fd9b7588b1
| 587
|
py
|
Python
|
test/old_test/test_filemapper_metadata.py
|
AsiganTheSunk/python-multimedia-filemapper
|
5daa07c51f3e85df48a0c336633ac150687fe24c
|
[
"Xnet",
"X11"
] | null | null | null |
test/old_test/test_filemapper_metadata.py
|
AsiganTheSunk/python-multimedia-filemapper
|
5daa07c51f3e85df48a0c336633ac150687fe24c
|
[
"Xnet",
"X11"
] | null | null | null |
test/old_test/test_filemapper_metadata.py
|
AsiganTheSunk/python-multimedia-filemapper
|
5daa07c51f3e85df48a0c336633ac150687fe24c
|
[
"Xnet",
"X11"
] | null | null | null |
# Metadata Create Tests
def test0_metadata_create():
    # TODO: placeholder — no assertions implemented yet
    return


# Metadata Getters Tests
def test0_metadata_name():
    # TODO: placeholder — no assertions implemented yet
    return


def test0_metadata_ename():
    # TODO: placeholder — no assertions implemented yet
    return


def test0_metadata_season():
    # TODO: placeholder — no assertions implemented yet
    return


def test0_metadata_episode():
    # TODO: placeholder — no assertions implemented yet
    return


def test0_metadata_quality():
    # TODO: placeholder — no assertions implemented yet
    return


def test0_metadata_extension():
    # TODO: placeholder — no assertions implemented yet
    return


def test0_metadata_year():
    # TODO: placeholder — no assertions implemented yet
    return


def tets0_metadata_fflag():
    # NOTE(review): name is misspelled ('tets0' instead of 'test0'), so test
    # runners matching 'test*' will not collect it — confirm and rename.
    return


# ExtendendMetadata Create Tests
def test0_extendedmetadata_create():
    # TODO: placeholder — no assertions implemented yet
    return


# ExtendendMetadata Getters Tests
def test0_extendedmetadata_genre():
    # TODO: placeholder — no assertions implemented yet
    return
| 13.340909
| 36
| 0.749574
| 67
| 587
| 6.238806
| 0.283582
| 0.191388
| 0.30622
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023061
| 0.187394
| 587
| 44
| 37
| 13.340909
| 0.853249
| 0.182283
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
5396b5899c6c0b9bcf3dfbfd29893891b85684fb
| 19,209
|
py
|
Python
|
ultra/utils/click_models.py
|
Keytoyze/Interactional-Observation-Based-Model
|
cc3dd07d922f7702bd424d32a785f62f49b4364c
|
[
"Apache-2.0"
] | 4
|
2021-06-21T22:07:43.000Z
|
2022-01-25T01:25:14.000Z
|
ultra/utils/click_models.py
|
Keytoyze/Interactional-Observation-Based-Model
|
cc3dd07d922f7702bd424d32a785f62f49b4364c
|
[
"Apache-2.0"
] | null | null | null |
ultra/utils/click_models.py
|
Keytoyze/Interactional-Observation-Based-Model
|
cc3dd07d922f7702bd424d32a785f62f49b4364c
|
[
"Apache-2.0"
] | null | null | null |
import os
import sys
import random
import json
from math import exp
def loadModelFromJson(model_desc):
    """Deserialize a click model from its JSON description.

    Parameters
    ----------
    model_desc : dict
        A description as produced by ClickModel.getModelJson, with keys
        'model_name', 'eta', 'click_prob' and 'exam_prob'.

    Returns
    -------
    ClickModel
        The reconstructed model. NOTE(review): any unrecognized 'model_name'
        silently falls back to PositionBiasedModel — confirm this is intended.
    """
    click_model = PositionBiasedModel()
    if model_desc['model_name'] == 'user_browsing_model':
        click_model = UserBrowsingModel()
    elif model_desc['model_name'] == 'cascade_model':
        click_model = CascadeModel()
    elif model_desc['model_name'] == 'click_chain_model':
        click_model = ClickChainModel()
    elif model_desc['model_name'] == 'bidirection_dcm':
        click_model = BidirectionDCM()
    elif model_desc['model_name'] == 'context_user_browsing_model':
        click_model = ContextUserBrowsingModel()
    # restore the serialized parameters on the chosen model
    click_model.eta = model_desc['eta']
    click_model.click_prob = model_desc['click_prob']
    click_model.exam_prob = model_desc['exam_prob']
    return click_model
class ClickModel:
    """Base class for simulated click models.

    Holds the click-probability table (per relevance grade) and the
    examination probabilities; subclasses implement the actual sampling.
    """

    def __init__(self, neg_click_prob=0.0, pos_click_prob=1.0,
                 relevance_grading_num=1, eta=1.0):
        self.exam_prob = None
        self.setExamProb(eta)
        self.setClickProb(
            neg_click_prob,
            pos_click_prob,
            relevance_grading_num)

    @property
    def model_name(self):
        return 'click_model'

    # Serialize model into a json.
    def getModelJson(self):
        """Return a dict description usable by loadModelFromJson."""
        desc = {
            'model_name': self.model_name,
            'eta': self.eta,
            'click_prob': self.click_prob,
            'exam_prob': self.exam_prob
        }
        return desc

    # Generate noisy click probability based on relevance grading number
    # Inspired by ERR
    def setClickProb(self, neg_click_prob, pos_click_prob,
                     relevance_grading_num):
        """Build click_prob[grade] = a + 2^grade * b, an ERR-style geometric ramp
        from neg_click_prob (grade 0) up to pos_click_prob (highest grade)."""
        b = (pos_click_prob - neg_click_prob) / \
            (pow(2, relevance_grading_num) - 1)
        a = neg_click_prob - b
        self.click_prob = [
            a + pow(2, i) * b for i in range(relevance_grading_num + 1)]

    # Set the examination probability for the click model.
    def setExamProb(self, eta):
        """Base class only records eta; subclasses build the actual table."""
        self.eta = eta
        return

    # Sample clicks for a list
    def sampleClicksForOneList(self, label_list):
        """Abstract hook; subclasses return (clicks, exam_probs, click_probs)."""
        return None

    # Estimate propensity for clicks in a list
    def estimatePropensityWeightsForOneList(
            self, click_list, use_non_clicked_data=False):
        """Abstract hook; subclasses return a list of propensity weights."""
        return None
class PositionBiasedModel(ClickModel):
    """Position-biased click model: examination depends only on the rank."""

    @property
    def model_name(self):
        return 'position_biased_model'

    def setExamProb(self, eta):
        """Build per-rank examination probabilities, sharpened by exponent eta."""
        self.eta = eta
        self.original_exam_prob = [0.68, 0.61, 0.48,
                                   0.34, 0.28, 0.20, 0.11, 0.10, 0.08, 0.06]
        self.exam_prob = [pow(x, eta) for x in self.original_exam_prob]

    def sampleClicksForOneList(self, label_list):
        """Sample one click per rank; returns (clicks, exam_probs, click_probs)."""
        click_list, exam_p_list, click_p_list = [], [], []
        for rank in range(len(label_list)):
            click, exam_p, click_p = self.sampleClick(rank, label_list[rank])
            click_list.append(click)
            exam_p_list.append(exam_p)
            click_p_list.append(click_p)
        return click_list, exam_p_list, click_p_list

    def estimatePropensityWeightsForOneList(
            self, click_list, use_non_clicked_data=False):
        """Inverse-propensity weights normalized to rank 0; 0.0 for skipped items."""
        propensity_weights = []
        for r in range(len(click_list)):
            pw = 0.0
            # fixed: original used bitwise `|`, which binds tighter than `>` and
            # actually evaluated `(use_non_clicked_data | click_list[r]) > 0`
            if use_non_clicked_data or click_list[r] > 0:
                pw = 1.0 / self.getExamProb(r) * self.getExamProb(0)
            propensity_weights.append(pw)
        return propensity_weights

    def sampleClick(self, rank, relevance_label):
        """Bernoulli click at `rank` with p = exam_prob(rank) * click_prob(label)."""
        if not relevance_label == int(relevance_label):
            print('RELEVANCE LABEL MUST BE INTEGER!')
        relevance_label = int(relevance_label) if relevance_label > 0 else 0
        exam_p = self.getExamProb(rank)
        # clamp out-of-range grades to the last click-probability entry
        click_p = self.click_prob[relevance_label if relevance_label < len(
            self.click_prob) else -1]
        click = 1 if random.random() < exam_p * click_p else 0
        return click, exam_p, click_p

    def getExamProb(self, rank):
        # NOTE(review): `dynamic_eta` is never assigned in this file; it must be
        # set externally before any exam-probability call — confirm with callers.
        return self.exam_prob[rank if rank < len(self.exam_prob) else -1] ** self.dynamic_eta
class UserBrowsingModel(ClickModel):
    """User browsing model: examination depends on rank AND on the distance to
    the last clicked rank."""

    @property
    def model_name(self):
        return 'user_browsing_model'

    def setExamProb(self, eta):
        """Build the [rank][distance-1] examination table, sharpened by eta."""
        self.eta = eta
        self.original_rd_exam_table = [
            [1.0],
            [0.98, 1.0],
            [1.0, 0.62, 0.95],
            [1.0, 0.77, 0.42, 0.82],
            [1.0, 0.92, 0.55, 0.31, 0.69],
            [1.0, 0.96, 0.63, 0.4, 0.22, 0.54],
            [1.0, 0.99, 0.73, 0.46, 0.29, 0.17, 0.47],
            [1.0, 1.0, 0.89, 0.52, 0.35, 0.24, 0.14, 0.43],
            [1.0, 1.0, 0.95, 0.68, 0.4, 0.29, 0.19, 0.12, 0.41],
            [1.0, 1.0, 1.0, 0.96, 0.52, 0.36, 0.27, 0.18, 0.12, 0.43]
        ]
        self.exam_prob = []
        for i in range(len(self.original_rd_exam_table)):
            self.exam_prob.append([pow(x, eta)
                                   for x in self.original_rd_exam_table[i]])

    def sampleClicksForOneList(self, label_list):
        """Sample clicks rank by rank, tracking the last clicked rank."""
        click_list, exam_p_list, click_p_list = [], [], []
        last_click_rank = -1
        for rank in range(len(label_list)):
            click, exam_p, click_p = self.sampleClick(
                rank, last_click_rank, label_list[rank])
            if click > 0:
                last_click_rank = rank
            click_list.append(click)
            exam_p_list.append(exam_p)
            click_p_list.append(click_p)
        return click_list, exam_p_list, click_p_list

    def estimatePropensityWeightsForOneList(
            self, click_list, use_non_clicked_data=False):
        """Inverse-propensity weights; 0.0 for items that are skipped."""
        propensity_weights = []
        last_click_rank = -1
        for r in range(len(click_list)):
            pw = 0.0
            # fixed: original used bitwise `|`, which binds tighter than `>` and
            # actually evaluated `(use_non_clicked_data | click_list[r]) > 0`
            if use_non_clicked_data or click_list[r] > 0:
                pw = 1.0 / self.getExamProb(r, last_click_rank)
            # NOTE(review): last-click tracking reconstructed at loop level
            # (updates on every click, weighted or not) — confirm against upstream.
            if click_list[r] > 0:
                last_click_rank = r
            propensity_weights.append(pw)
        return propensity_weights

    def sampleClick(self, rank, last_click_rank, relevance_label):
        """Bernoulli click with p = exam_prob(rank, distance) * click_prob(label)."""
        if not relevance_label == int(relevance_label):
            print('RELEVANCE LABEL MUST BE INTEGER!')
        relevance_label = int(relevance_label) if relevance_label > 0 else 0
        exam_p = self.getExamProb(rank, last_click_rank)
        click_p = self.click_prob[relevance_label if relevance_label < len(
            self.click_prob) else -1]
        click = 1 if random.random() < exam_p * click_p else 0
        return click, exam_p, click_p

    def getExamProb(self, rank, last_click_rank):
        """Look up exam prob by rank and distance-to-last-click, clamping both."""
        distance = rank - last_click_rank
        if rank < len(self.exam_prob):
            exam_p = self.exam_prob[rank][distance - 1]
        else:
            if distance > rank:
                exam_p = self.exam_prob[-1][-1]
            else:
                idx = distance - \
                    1 if distance < len(self.exam_prob[-1]) - 1 else -2
                exam_p = self.exam_prob[-1][idx]
        # pbm_exam is only used by the commented-out blended variant below;
        # kept to preserve that documented alternative.
        pbm_exam = [0.68, 0.61, 0.48, 0.34, 0.28, 0.20, 0.11, 0.10, 0.08, 0.06, 0][rank if rank < 10 else -1]
        # return exam_p * (1 - self.dynamic_eta) + pbm_exam * self.dynamic_eta
        # NOTE(review): `dynamic_eta` is never assigned in this file; it must be
        # set externally before any exam-probability call — confirm with callers.
        return exam_p ** self.dynamic_eta
class ContextUserBrowsingModel(ClickModel):
    """UBM variant that interpolates the rank/distance examination
    probability with a fixed position-biased (PBM-style) examination curve,
    weighted by ``dynamic_eta``.
    """

    @property
    def model_name(self):
        return 'context_user_browsing_model'

    def setExamProb(self, eta):
        """Build the rank/distance examination table, sharpened by ``eta``.

        ``self.exam_prob[rank][distance - 1]`` is the probability that the
        result at ``rank`` is examined when the last click happened
        ``distance`` positions earlier.
        """
        self.eta = eta
        # Empirical examination probabilities, indexed by [rank][distance - 1].
        self.original_rd_exam_table = [
            [1.0],
            [0.98, 1.0],
            [1.0, 0.62, 0.95],
            [1.0, 0.77, 0.42, 0.82],
            [1.0, 0.92, 0.55, 0.31, 0.69],
            [1.0, 0.96, 0.63, 0.4, 0.22, 0.54],
            [1.0, 0.99, 0.73, 0.46, 0.29, 0.17, 0.47],
            [1.0, 1.0, 0.89, 0.52, 0.35, 0.24, 0.14, 0.43],
            [1.0, 1.0, 0.95, 0.68, 0.4, 0.29, 0.19, 0.12, 0.41],
            [1.0, 1.0, 1.0, 0.96, 0.52, 0.36, 0.27, 0.18, 0.12, 0.43]
        ]
        self.exam_prob = []
        for i in range(len(self.original_rd_exam_table)):
            self.exam_prob.append([pow(x, eta)
                                   for x in self.original_rd_exam_table[i]])

    def sampleClicksForOneList(self, label_list):
        """Sample clicks for one ranked list of relevance labels.

        Returns (click_list, exam_prob_list, click_prob_list).
        """
        click_list, exam_p_list, click_p_list = [], [], []
        last_click_rank = -1  # -1 means "no click observed yet"
        for rank in range(len(label_list)):
            click, exam_p, click_p = self.sampleClick(
                rank, last_click_rank, label_list[rank])
            if click > 0:
                last_click_rank = rank
            click_list.append(click)
            exam_p_list.append(exam_p)
            click_p_list.append(click_p)
        return click_list, exam_p_list, click_p_list

    def estimatePropensityWeightsForOneList(
            self, click_list, use_non_clicked_data=False):
        """Inverse-propensity weights (1 / P(examined)) per position.

        Non-clicked positions get weight 0.0 unless ``use_non_clicked_data``
        is set.
        """
        propensity_weights = []
        last_click_rank = -1
        for r in range(len(click_list)):
            pw = 0.0
            # BUGFIX: was `use_non_clicked_data | click_list[r] > 0`, which
            # parses as `(use_non_clicked_data | click_list[r]) > 0` because
            # bitwise `|` binds tighter than `>`.  A logical `or` states the
            # intent explicitly and is correct for any click count.
            if use_non_clicked_data or click_list[r] > 0:
                pw = 1.0 / self.getExamProb(r, last_click_rank)
            if click_list[r] > 0:
                last_click_rank = r
            propensity_weights.append(pw)
        return propensity_weights

    def sampleClick(self, rank, last_click_rank, relevance_label):
        """Sample one click given rank, last clicked rank, and relevance label."""
        if not relevance_label == int(relevance_label):
            print('RELEVANCE LABEL MUST BE INTEGER!')
        # Negative labels are clamped to grade 0.
        relevance_label = int(relevance_label) if relevance_label > 0 else 0
        exam_p = self.getExamProb(rank, last_click_rank)
        click_p = self.click_prob[relevance_label if relevance_label < len(
            self.click_prob) else -1]
        click = 1 if random.random() < exam_p * click_p else 0
        return click, exam_p, click_p

    def getExamProb(self, rank, last_click_rank):
        """Examination probability: UBM table value interpolated with a fixed
        position-bias curve, weighted by ``dynamic_eta``.
        """
        distance = rank - last_click_rank
        if rank < len(self.exam_prob):
            exam_p = self.exam_prob[rank][distance - 1]
        else:
            if distance > rank:
                # No previous click for an off-table rank.
                exam_p = self.exam_prob[-1][-1]
            else:
                # NOTE(review): overflow falls back to the second-to-last
                # entry (-2), not the last — presumably intentional; confirm.
                idx = distance - 1 if distance < len(self.exam_prob[-1]) - 1 else -2
                exam_p = self.exam_prob[-1][idx]
        # Fixed position-bias examination curve; off-table ranks use the last entry.
        pbm_exam = [0.68, 0.61, 0.48, 0.34, 0.28, 0.20, 0.11, 0.10, 0.08, 0.06, 0][rank if rank < 10 else -1]
        return exam_p * self.dynamic_eta + pbm_exam * (1 - self.dynamic_eta)
class CascadeModel(ClickModel):
    """Cascade click model: the user scans results top-down and the
    probability of continuing decays after every click.
    """

    @property
    def model_name(self):
        return 'cascade_model'

    def setExamProb(self, eta):
        """Initialize continuation ("not satisfied") probabilities.

        ``eta`` is applied twice: once inside the base decay and once more
        when the table is exponentiated.
        """
        self.eta = eta
        # Alternative decay once considered: exp(-j / 4 - 0.7) for j in range(10).
        self.origin_not_satisfied_prob = [(1 / (j + 1)) ** eta for j in range(10)]
        self.exam_prob = [x ** eta for x in self.origin_not_satisfied_prob]

    def sampleClicksForOneList(self, label_list):
        """Sample clicks top-down; examination shrinks after each click."""
        clicks, exam_ps, click_ps = [], [], []
        continue_prob = 1.0
        for pos, label in enumerate(label_list):
            c, e_p, c_p = self.sampleClick(pos, label, continue_prob)
            clicks.append(c)
            exam_ps.append(e_p)
            click_ps.append(c_p)
            if c > 0:
                continue_prob *= self.getNotSatisfiedProb(pos)
        return clicks, exam_ps, click_ps

    # NOTE(review): a propensity-weight override was commented out here in the
    # original source; the inherited ClickModel behavior applies.

    def sampleClick(self, rank, relevance_label, last_exam_prob):
        """Sample one click; the examination probability is carried in
        ``last_exam_prob`` by the caller.
        """
        if relevance_label != int(relevance_label):
            print('RELEVANCE LABEL MUST BE INTEGER!')
        grade = int(relevance_label) if relevance_label > 0 else 0
        click_p = self.click_prob[grade if grade < len(self.click_prob) else -1]
        click = int(random.random() < last_exam_prob * click_p)
        return click, last_exam_prob, click_p

    def getExamProb(self, rank):
        # Examination is tracked incrementally by the sampler, not per-rank.
        return 1

    def getNotSatisfiedProb(self, rank):
        """Continuation probability after a click at ``rank``.

        Out-of-range ranks fall back to index 0 (unlike the UBM/PBM models,
        which fall back to the last entry).
        """
        pos = rank if rank < len(self.exam_prob) else 0
        return self.exam_prob[pos] ** self.dynamic_eta
class BidirectionDCM(ClickModel):
    """DCM variant that scans the list in both directions and merges the two
    passes with a probabilistic OR per position.
    """

    @property
    def model_name(self):
        # NOTE(review): registered name differs from the class name;
        # presumably kept for compatibility with saved model files — confirm.
        return 'random_dcm_model'

    def setExamProb(self, eta):
        """Initialize continuation ("not satisfied") probabilities.

        ``eta`` is applied twice: once inside the base decay and once more
        when the table is exponentiated.
        """
        self.eta = eta
        self.origin_not_satisfied_prob = [(1 / (j + 1)) ** eta for j in range(10)]
        self.exam_prob = [x ** eta for x in self.origin_not_satisfied_prob]

    def sampleClicksForOneList(self, label_list):
        """Sample clicks in a forward and a backward pass and merge them.

        Each per-position quantity q is merged as 1 - (1 - q_fwd)(1 - q_bwd),
        i.e. a probabilistic OR of the two passes.
        """
        n = len(label_list)
        click_list = [0] * n
        exam_p_list = [0] * n
        click_p_list = [0] * n

        def merge_at(pos, carry_prob, not_satisfied_prob):
            # Sample at `pos` with the current continuation probability and
            # OR the outcome into the accumulated per-position lists.
            c, e_p, c_p = self.sampleClick(pos, label_list[pos], carry_prob)
            click_list[pos] = 1 - (1 - click_list[pos]) * (1 - c)
            exam_p_list[pos] = 1 - (1 - exam_p_list[pos]) * (1 - e_p)
            click_p_list[pos] = 1 - (1 - click_p_list[pos]) * (1 - c_p)
            return carry_prob * not_satisfied_prob if c > 0 else carry_prob

        carry = 1.0
        for pos in range(n):
            carry = merge_at(pos, carry, self.getNotSatisfiedProb(pos))
        carry = 1.0
        # Backward pass: decay is indexed by the number of steps taken, not
        # by the absolute position.
        for step, pos in enumerate(reversed(range(n))):
            carry = merge_at(pos, carry, self.getNotSatisfiedProb(step))
        return click_list, exam_p_list, click_p_list

    # NOTE(review): a propensity-weight override was commented out here in the
    # original source; the inherited ClickModel behavior applies.

    def sampleClick(self, rank, relevance_label, last_exam_prob):
        """Sample one click; the examination probability is carried in
        ``last_exam_prob`` by the caller.
        """
        if relevance_label != int(relevance_label):
            print('RELEVANCE LABEL MUST BE INTEGER!')
        grade = int(relevance_label) if relevance_label > 0 else 0
        click_p = self.click_prob[grade if grade < len(self.click_prob) else -1]
        click = int(random.random() < last_exam_prob * click_p)
        return click, last_exam_prob, click_p

    def getExamProb(self, rank):
        # Examination is tracked incrementally by the sampler, not per-rank.
        return 1

    def getNotSatisfiedProb(self, rank):
        """Continuation probability after a click at ``rank``.

        Out-of-range ranks fall back to index 0.
        """
        pos = rank if rank < len(self.exam_prob) else 0
        return self.exam_prob[pos] ** self.dynamic_eta
class ClickChainModel(ClickModel):
    """Click Chain Model (CCM) with continuation parameters alpha1..alpha3."""

    @property
    def model_name(self):
        return 'click_chain_model'

    def setExamProb(self, eta):
        """Initialize the CCM parameters.

        The decay table is computed but the model's active parameters are the
        three alpha values stored in ``self.exam_prob``.
        """
        self.eta = eta
        # Alternative decay once considered: exp(-j / 4 - 0.7) for j in range(10).
        self.origin_not_satisfied_prob = [(1 / (j + 1)) ** eta for j in range(10)]
        self.exam_prob = [1.0, 0.4, 0.27]  # alpha1, alpha2, alpha3

    def sampleClicksForOneList(self, label_list):
        """Sample clicks for a ranked list under the click-chain model."""
        clicks, exam_ps, click_ps = [], [], []
        exam = 1.0
        # Continuation parameters, each sharpened by dynamic_eta.
        a1, a2, a3 = [a ** self.dynamic_eta for a in self.exam_prob]
        for pos, label in enumerate(label_list):
            c, e_p, c_p = self.sampleClick(pos, label, exam)
            clicks.append(c)
            exam_ps.append(e_p)
            click_ps.append(c_p)
            # Without a click the next result is examined with prob a1; after
            # a click the continuation mixes a2 and a3 weighted by the click
            # probability.
            exam = exam * (a1 - c * (a1 - a2 * (1 - c_p) - a3 * c_p))
        return clicks, exam_ps, click_ps

    def sampleClick(self, rank, relevance_label, last_exam_prob):
        """Sample one click; the examination probability is carried in
        ``last_exam_prob`` by the caller.
        """
        if relevance_label != int(relevance_label):
            print('RELEVANCE LABEL MUST BE INTEGER!')
        grade = int(relevance_label) if relevance_label > 0 else 0
        click_p = self.click_prob[grade if grade < len(self.click_prob) else -1]
        click = int(random.random() < last_exam_prob * click_p)
        return click, last_exam_prob, click_p

    def getExamProb(self, rank):
        # Examination is tracked incrementally by the sampler, not per-rank.
        return 1

    def getNotSatisfiedProb(self, rank):
        # Unlike Cascade/BidirectionDCM, no dynamic_eta exponent is applied.
        pos = rank if rank < len(self.exam_prob) else 0
        return self.exam_prob[pos]
def test_initialization():
    """Manual smoke test: PBM and UBM sampling, list sampling, propensities."""
    # --- Position Biased Model ---
    pbm = PositionBiasedModel(0.1, 0.9, 4, 1.0)
    for args in ((3, 4), (2, 0), (14, 1)):
        print('PBM(%d, %d) -> %d, %f, %f' % (args + pbm.sampleClick(*args)))
    clicks, exam_ps, click_ps = pbm.sampleClicksForOneList([
        4, 0, 3, 4])
    print(clicks)
    print(exam_ps)
    print(click_ps)
    print(pbm.estimatePropensityWeightsForOneList(clicks))
    # --- User Browsing Model ---
    ubm = UserBrowsingModel(0.1, 0.9, 4, 1.0)
    for args in ((3, 0, 4), (14, -1, 0), (14, 9, 1), (14, 1, 2)):
        print('UBM(%d, %d, %d) -> %d, %f, %f' % (args + ubm.sampleClick(*args)))
    clicks, exam_ps, click_ps = ubm.sampleClicksForOneList([
        4, 0, 3, 4])
    print(clicks)
    print(exam_ps)
    print(click_ps)
    print(ubm.estimatePropensityWeightsForOneList(clicks))
def test_load_from_file():
    """Manual smoke test: rebuild a click model from the JSON file in argv[1]."""
    path = sys.argv[1]
    with open(path) as fin:
        click_model = loadModelFromJson(json.load(fin))
    clicks, exam_ps, click_ps = click_model.sampleClicksForOneList([
        4, 0, 3, 4])
    for values in (clicks, exam_ps, click_ps):
        print(values)
    print(click_model.estimatePropensityWeightsForOneList(clicks))
def main():
    """CLI entry point: build a click model and dump its JSON description.

    argv: model_name neg_click_prob pos_click_prob max_relevance_grade eta
          output_path
    The output file name encodes all five model arguments.
    """
    model_registry = {
        'pbm': PositionBiasedModel,
        'cascade': CascadeModel,
        'ubm': UserBrowsingModel,
    }
    model_name = sys.argv[1]
    neg_click_prob = float(sys.argv[2])
    pos_click_prob = float(sys.argv[3])
    max_relevance_grade = int(sys.argv[4])
    eta = float(sys.argv[5])
    output_path = sys.argv[6]
    click_model = model_registry[model_name](
        neg_click_prob, pos_click_prob, max_relevance_grade, eta)
    out_name = '_'.join(sys.argv[1:6]) + '.json'
    with open(output_path + '/' + out_name, 'w') as fout:
        fout.write(
            json.dumps(
                click_model.getModelJson(),
                indent=4,
                sort_keys=True))
# Script entry point: generates a click-model JSON file via main().
if __name__ == "__main__":
    # test_load_from_file()  # alternate manual smoke-test entry point
    main()
| 38.806061
| 121
| 0.598417
| 2,681
| 19,209
| 4.035062
| 0.074226
| 0.02958
| 0.032169
| 0.024404
| 0.813551
| 0.774635
| 0.755038
| 0.726844
| 0.703642
| 0.692457
| 0
| 0.047672
| 0.29127
| 19,209
| 494
| 122
| 38.884615
| 0.746952
| 0.060701
| 0
| 0.659847
| 0
| 0
| 0.039865
| 0.004164
| 0
| 0
| 0
| 0
| 0
| 1
| 0.122762
| false
| 0
| 0.012788
| 0.040921
| 0.248082
| 0.063939
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
53a40757b8315439ac1d6749f3e59af44a85db74
| 106
|
py
|
Python
|
shared/app_settings.py
|
dArignac/shared
|
9eee5fb102818a5e63e26232e2ad7a5d904cf1b1
|
[
"MIT"
] | null | null | null |
shared/app_settings.py
|
dArignac/shared
|
9eee5fb102818a5e63e26232e2ad7a5d904cf1b1
|
[
"MIT"
] | null | null | null |
shared/app_settings.py
|
dArignac/shared
|
9eee5fb102818a5e63e26232e2ad7a5d904cf1b1
|
[
"MIT"
] | null | null | null |
from django.conf import settings
COPYRIGHT_YEAR_START = getattr(settings, 'COPYRIGHT_YEAR_START', 2012)
| 21.2
| 70
| 0.820755
| 14
| 106
| 5.928571
| 0.714286
| 0.409639
| 0.506024
| 0.626506
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042105
| 0.103774
| 106
| 4
| 71
| 26.5
| 0.831579
| 0
| 0
| 0
| 0
| 0
| 0.188679
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
53bcc9ed9444d5224dbdbc4b62e34f3207ee297c
| 95
|
py
|
Python
|
app/discal/__main__.py
|
Shirataki2/DisCalendar
|
cfb5ecad6c65911fbb041cbc585d86588de125f5
|
[
"MIT"
] | 6
|
2020-11-29T08:04:07.000Z
|
2021-05-07T11:05:10.000Z
|
app/discal/__main__.py
|
Shirataki2/DisCalendar
|
cfb5ecad6c65911fbb041cbc585d86588de125f5
|
[
"MIT"
] | 139
|
2020-11-24T23:37:03.000Z
|
2022-03-30T00:18:09.000Z
|
app/discal/__main__.py
|
Shirataki2/DisCalendar
|
cfb5ecad6c65911fbb041cbc585d86588de125f5
|
[
"MIT"
] | 1
|
2021-02-01T15:07:17.000Z
|
2021-02-01T15:07:17.000Z
|
from discal.bot import Bot
import os
Bot(command_prefix="cal ").run(os.environ["BOT_TOKEN"])
| 15.833333
| 55
| 0.747368
| 16
| 95
| 4.3125
| 0.6875
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 95
| 5
| 56
| 19
| 0.811765
| 0
| 0
| 0
| 0
| 0
| 0.136842
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
53da9107a92cfcf302edff24763874a5a35b8cfd
| 35
|
py
|
Python
|
app/__init__.py
|
onap/sdc-dcae-d-tosca-lab
|
b0120c1671e8987387ccae4f21793ceb303f471c
|
[
"Apache-2.0"
] | 1
|
2021-10-15T19:47:42.000Z
|
2021-10-15T19:47:42.000Z
|
app/__init__.py
|
onap/archive-sdc-dcae-d-tosca-lab
|
b0120c1671e8987387ccae4f21793ceb303f471c
|
[
"Apache-2.0"
] | null | null | null |
app/__init__.py
|
onap/archive-sdc-dcae-d-tosca-lab
|
b0120c1671e8987387ccae4f21793ceb303f471c
|
[
"Apache-2.0"
] | 1
|
2021-10-15T19:47:34.000Z
|
2021-10-15T19:47:34.000Z
|
from app.version import __version__
| 35
| 35
| 0.885714
| 5
| 35
| 5.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 35
| 1
| 35
| 35
| 0.84375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
54faff719a208be0fc074541d1b80fcede1bc221
| 35
|
py
|
Python
|
halotools/empirical_models/assembias_models/__init__.py
|
pllim/halotools
|
6499cff09e7e0f169e4f425ee265403f6be816e8
|
[
"BSD-3-Clause"
] | 83
|
2015-01-15T14:54:16.000Z
|
2021-12-09T11:28:02.000Z
|
halotools/empirical_models/assembias_models/__init__.py
|
pllim/halotools
|
6499cff09e7e0f169e4f425ee265403f6be816e8
|
[
"BSD-3-Clause"
] | 579
|
2015-01-14T15:57:37.000Z
|
2022-01-13T18:58:44.000Z
|
halotools/empirical_models/assembias_models/__init__.py
|
pllim/halotools
|
6499cff09e7e0f169e4f425ee265403f6be816e8
|
[
"BSD-3-Clause"
] | 70
|
2015-01-14T15:15:58.000Z
|
2021-12-22T18:18:31.000Z
|
from .heaviside_assembias import *
| 17.5
| 34
| 0.828571
| 4
| 35
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 35
| 1
| 35
| 35
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
07357d360c2d7b34a3e587fdc2b9e1fa92275306
| 24
|
py
|
Python
|
flask_02_ex/app/nasa_apod/models/__init__.py
|
japinol7/flask_examples
|
09f962f6f9fad6fc291675aac441597936d1475a
|
[
"MIT"
] | 3
|
2020-09-27T13:38:13.000Z
|
2020-09-27T15:04:14.000Z
|
flask_02_ex/app/nasa_apod/models/__init__.py
|
japinol7/flask_examples
|
09f962f6f9fad6fc291675aac441597936d1475a
|
[
"MIT"
] | null | null | null |
flask_02_ex/app/nasa_apod/models/__init__.py
|
japinol7/flask_examples
|
09f962f6f9fad6fc291675aac441597936d1475a
|
[
"MIT"
] | null | null | null |
from . import nasa_apod
| 12
| 23
| 0.791667
| 4
| 24
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 1
| 24
| 24
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
074ab531c50aeb8e340b1772a36968e3b2d788df
| 30
|
py
|
Python
|
yapper/blueprints/main/__init__.py
|
brijeshb42/flask-web
|
a859fb68fe0eedf5ee872767d107f95a4e6f4856
|
[
"MIT"
] | 14
|
2015-02-20T18:31:33.000Z
|
2020-12-23T02:33:05.000Z
|
yapper/blueprints/main/__init__.py
|
brijeshb42/flask-web
|
a859fb68fe0eedf5ee872767d107f95a4e6f4856
|
[
"MIT"
] | 2
|
2015-02-21T18:49:12.000Z
|
2015-10-06T18:10:30.000Z
|
yapper/blueprints/main/__init__.py
|
brijeshb42/yapper
|
a859fb68fe0eedf5ee872767d107f95a4e6f4856
|
[
"MIT"
] | 10
|
2015-02-21T11:06:57.000Z
|
2022-02-21T01:25:34.000Z
|
from .controllers import main
| 15
| 29
| 0.833333
| 4
| 30
| 6.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 30
| 1
| 30
| 30
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ab63446758f956ca8bfd32c6ef7270028b64eeab
| 46
|
py
|
Python
|
icf/pyicf/__init__.py
|
sflis/pyicf
|
cafc4ed15a2f0bd66bb04fc4afe9245e8b15d879
|
[
"MIT"
] | 2
|
2020-02-18T22:35:35.000Z
|
2021-08-16T13:00:33.000Z
|
icf/pyicf/__init__.py
|
sflis/icf
|
cafc4ed15a2f0bd66bb04fc4afe9245e8b15d879
|
[
"MIT"
] | null | null | null |
icf/pyicf/__init__.py
|
sflis/icf
|
cafc4ed15a2f0bd66bb04fc4afe9245e8b15d879
|
[
"MIT"
] | null | null | null |
from .icffile import ICFFile
# from . import
| 11.5
| 28
| 0.73913
| 6
| 46
| 5.666667
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195652
| 46
| 3
| 29
| 15.333333
| 0.918919
| 0.282609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
db4a60e39793c856fbfcefa0c731944addf422e9
| 88
|
py
|
Python
|
tests/migrations/test_migrations_squashed_ref_squashed/app1/1_auto.py
|
Yoann-Vie/esgi-hearthstone
|
115d03426c7e8e80d89883b78ac72114c29bed12
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
tests/migrations/test_migrations_squashed_ref_squashed/app1/1_auto.py
|
Yoann-Vie/esgi-hearthstone
|
115d03426c7e8e80d89883b78ac72114c29bed12
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
tests/migrations/test_migrations_squashed_ref_squashed/app1/1_auto.py
|
Yoann-Vie/esgi-hearthstone
|
115d03426c7e8e80d89883b78ac72114c29bed12
|
[
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null |
from django.db import migrations
class Migration(migrations.Migration):
pass
| 14.666667
| 39
| 0.738636
| 10
| 88
| 6.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.204545
| 88
| 5
| 40
| 17.6
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
db53b48e30ca278114d6c64abef55e30400829de
| 42,917
|
py
|
Python
|
tests/test_export_rsps.py
|
gustavofonseca/articles_meta
|
50904c33827f51ee0ce4e5c9a89ddc21eb155e6d
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_export_rsps.py
|
gustavofonseca/articles_meta
|
50904c33827f51ee0ce4e5c9a89ddc21eb155e6d
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_export_rsps.py
|
gustavofonseca/articles_meta
|
50904c33827f51ee0ce4e5c9a89ddc21eb155e6d
|
[
"BSD-2-Clause"
] | null | null | null |
# coding: utf-8
import unittest
from lxml import etree as ET
import json
import os
from lxml import etree
from xylose.scielodocument import Article
from articlemeta import export_rsps
from articlemeta import export
class XMLCitationTests(unittest.TestCase):
    """Unit tests for the ``export_rsps.XMLCitation`` pipeline pipes.

    Every test follows the same pattern: build ``data = [citation, element]``
    (a xylose citation object paired with an lxml element), run one pipe's
    ``transform`` on it, then assert on the resulting XML tree.  Tests with
    ``_without_data_`` in the name feed an empty fake citation and expect the
    pipe to emit nothing.  Fixture values come from
    ``tests/fixtures/article_meta.json``.

    NOTE(review): ``basestring`` below means this module targets Python 2.
    """

    def setUp(self):
        # The first citation of the fixture article drives most tests.
        self._raw_json = json.loads(open(os.path.dirname(__file__)+'/fixtures/article_meta.json').read())
        self._citation_meta = Article(self._raw_json).citations[0]
        self._xmlcitation = export_rsps.XMLCitation()

    def test_xml_citation_setup_pipe(self):
        # SetupCitationPipe creates the <ref> root element itself.
        data = [self._citation_meta, None]
        raw, xml = self._xmlcitation.SetupCitationPipe().transform(data)
        rootcitation = xml.findall('.')[0].tag
        self.assertEqual('ref', rootcitation)

    def test_xml_citation_id_as_str_pipe(self):
        pxml = ET.Element('ref')
        data = [self._citation_meta, pxml]
        raw, xml = self._xmlcitation.RefIdPipe().transform(data)
        strid = xml.find('.').get('id')
        # ``basestring`` exists only on Python 2.
        self.assertTrue(isinstance(strid, basestring))

    def test_xml_citation_element_citation_pipe(self):
        pxml = ET.Element('ref')
        data = [self._citation_meta, pxml]
        raw, xml = self._xmlcitation.ElementCitationPipe().transform(data)
        publicationtype = xml.find('./element-citation[@publication-type="journal"]').get('publication-type')
        self.assertEqual(u'journal', publicationtype)

    def test_xml_citation_article_title_pipe(self):
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [self._citation_meta, pxml]
        raw, xml = self._xmlcitation.ArticleTitlePipe().transform(data)
        expected = xml.find('./element-citation/article-title').text
        self.assertEqual(u'End-stage renal disease in sub-Saharan Africa.', expected)

    def test_xml_citation_article_title_without_data_pipe(self):
        # An empty citation must not produce an <article-title> element.
        fakexylosearticle = Article({'article': {},
                                     'title': {},
                                     'citations': [{}]}).citations[0]
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [fakexylosearticle, pxml]
        raw, xml = self._xmlcitation.ArticleTitlePipe().transform(data)
        expected = xml.find('./element-citation/article-title')
        self.assertEqual(None, expected)

    def test_xml_citation_source_pipe(self):
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [self._citation_meta, pxml]
        raw, xml = self._xmlcitation.SourcePipe().transform(data)
        expected = xml.find('./element-citation/source').text
        self.assertEqual(u'Ethn Dis.', expected)

    def test_xml_citation_source_without_data_pipe(self):
        fakexylosearticle = Article({'article': {},
                                     'title': {},
                                     'citations': [{}]}).citations[0]
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [fakexylosearticle, pxml]
        raw, xml = self._xmlcitation.SourcePipe().transform(data)
        expected = xml.find('./element-citation/source')
        self.assertEqual(None, expected)

    def test_xml_citation_date_pipe(self):
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [self._citation_meta, pxml]
        raw, xml = self._xmlcitation.DatePipe().transform(data)
        expected = xml.find('./element-citation/date/year').text
        self.assertEqual(u'2006', expected)

    def test_xml_citation_date_with_year_and_month_pipe(self):
        # Citation field v65 carries the date; 'YYYYMM' yields year + month.
        fakexylosearticle = Article({'article': {},
                                     'title': {},
                                     'citations': [{'v65': [{'_': '200604'}]}]}).citations[0]
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [fakexylosearticle, pxml]
        raw, xml = self._xmlcitation.DatePipe().transform(data)
        expected_year = xml.find('./element-citation/date/year').text
        expected_month = xml.find('./element-citation/date/month').text
        self.assertEqual(u'2006', expected_year)
        self.assertEqual(u'04', expected_month)

    def test_xml_citation_date_with_year_and_month_and_day_pipe(self):
        # 'YYYYMMDD' in v65 yields year + month + day elements.
        fakexylosearticle = Article({'article': {},
                                     'title': {},
                                     'citations': [{'v65': [{'_': '20060430'}]}]}).citations[0]
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [fakexylosearticle, pxml]
        raw, xml = self._xmlcitation.DatePipe().transform(data)
        expected_year = xml.find('./element-citation/date/year').text
        expected_month = xml.find('./element-citation/date/month').text
        expected_day = xml.find('./element-citation/date/day').text
        self.assertEqual(u'2006', expected_year)
        self.assertEqual(u'04', expected_month)
        self.assertEqual(u'30', expected_day)

    def test_xml_citation_date_without_data_pipe(self):
        fakexylosearticle = Article({'article': {},
                                     'title': {},
                                     'citations': [{}]}).citations[0]
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [fakexylosearticle, pxml]
        raw, xml = self._xmlcitation.DatePipe().transform(data)
        expected = xml.find('./element-citation/date')
        self.assertEqual(None, expected)

    def test_xml_citation_fpage_pipe(self):
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [self._citation_meta, pxml]
        raw, xml = self._xmlcitation.StartPagePipe().transform(data)
        expected = xml.find('./element-citation/fpage').text
        self.assertEqual(u'2,5,9', expected)

    def test_xml_citation_fpage_without_data_pipe(self):
        fakexylosearticle = Article({'article': {},
                                     'title': {},
                                     'citations': [{}]}).citations[0]
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [fakexylosearticle, pxml]
        raw, xml = self._xmlcitation.StartPagePipe().transform(data)
        expected = xml.find('./element-citation/fpage')
        self.assertEqual(None, expected)

    def test_xml_citation_lpage_pipe(self):
        # Citation field v14 carries the page range; lpage takes the end.
        fakexylosearticle = Article({'article': {},
                                     'title': {},
                                     'citations': [{'v14': [{'_': '120-130'}]}]}).citations[0]
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [fakexylosearticle, pxml]
        raw, xml = self._xmlcitation.EndPagePipe().transform(data)
        expected = xml.find('./element-citation/lpage').text
        self.assertEqual(u'130', expected)

    def test_xml_citation_lpage_without_data_pipe(self):
        fakexylosearticle = Article({'article': {},
                                     'title': {},
                                     'citations': [{}]}).citations[0]
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [fakexylosearticle, pxml]
        raw, xml = self._xmlcitation.EndPagePipe().transform(data)
        expected = xml.find('./element-citation/lpage')
        self.assertEqual(None, expected)

    def test_xml_citation_volume_pipe(self):
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [self._citation_meta, pxml]
        raw, xml = self._xmlcitation.VolumePipe().transform(data)
        expected = xml.find('./element-citation/volume').text
        self.assertEqual(u'16', expected)

    def test_xml_citation_volume_without_data_pipe(self):
        fakexylosearticle = Article({'article': {},
                                     'title': {},
                                     'citations': [{}]}).citations[0]
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [fakexylosearticle, pxml]
        raw, xml = self._xmlcitation.VolumePipe().transform(data)
        expected = xml.find('./element-citation/volume')
        self.assertEqual(None, expected)

    def test_xml_citation_issue_pipe(self):
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [self._citation_meta, pxml]
        raw, xml = self._xmlcitation.IssuePipe().transform(data)
        expected = xml.find('./element-citation/issue').text
        self.assertEqual(u'2', expected)

    def test_xml_citation_issue_without_data_pipe(self):
        fakexylosearticle = Article({'article': {},
                                     'title': {},
                                     'citations': [{}]}).citations[0]
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [fakexylosearticle, pxml]
        raw, xml = self._xmlcitation.IssuePipe().transform(data)
        expected = xml.find('./element-citation/issue')
        self.assertEqual(None, expected)

    def test_xml_citation_person_group_len_pipe(self):
        # Fixture citation has exactly one author.
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [self._citation_meta, pxml]
        raw, xml = self._xmlcitation.PersonGroupPipe().transform(data)
        expected = len(xml.findall('./element-citation/person-group/name'))
        self.assertEqual(1, expected)

    def test_xml_citation_person_group_given_names_pipe(self):
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [self._citation_meta, pxml]
        raw, xml = self._xmlcitation.PersonGroupPipe().transform(data)
        result = xml.find('./element-citation/person-group[@person-group-type="author"]/name/given-names').text
        self.assertEqual('EL', result)

    def test_xml_citation_person_group_surname_pipe(self):
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [self._citation_meta, pxml]
        raw, xml = self._xmlcitation.PersonGroupPipe().transform(data)
        result = xml.find('./element-citation/person-group[@person-group-type="author"]/name/surname').text
        self.assertEqual('Bamgboye', result)

    def test_xml_citation_person_group_without_data_pipe(self):
        fakexylosearticle = Article({'article': {},
                                     'title': {},
                                     'citations': [{}]}).citations[0]
        pxml = ET.Element('ref')
        pxml.append(ET.Element('element-citation'))
        data = [fakexylosearticle, pxml]
        raw, xml = self._xmlcitation.PersonGroupPipe().transform(data)
        expected = xml.find('./element-citation/person-group')
        self.assertEqual(None, expected)
class ExportTests(unittest.TestCase):
def setUp(self):
self._raw_json = json.loads(open(os.path.dirname(__file__)+'/fixtures/article_meta.json').read())
self._article_meta = Article(self._raw_json)
def test_xmlclose_pipe(self):
pxml = ET.Element('article')
data = [None, pxml]
xmlarticle = export_rsps.XMLClosePipe()
xml = xmlarticle.transform(data)
self.assertEqual('<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.0 20120330//EN" "JATS-journalpublishing1.dtd">\n<article/>', xml)
def test_setuppipe_element_name(self):
data = [None, None]
xmlarticle = export_rsps.SetupArticlePipe()
raw, xml = xmlarticle.transform(data)
self.assertEqual('article', xml.tag)
def test_setuppipe_attributes_specific_use(self):
data = [None, None]
xmlarticle = export_rsps.SetupArticlePipe()
raw, xml = xmlarticle.transform(data)
self.assertTrue('sps-1.1', xml.find('.').get('specific-use'))
def test_setuppipe_attributes_dtd_version(self):
data = [None, None]
xmlarticle = export_rsps.SetupArticlePipe()
raw, xml = xmlarticle.transform(data)
self.assertTrue('1.0', xml.find('.').get('dtd-version'))
def test_xmlarticle_pipe(self):
pxml = ET.Element('article')
data = [self._article_meta, pxml]
xmlarticle = export_rsps.XMLArticlePipe()
raw, xml = xmlarticle.transform(data)
self.assertEqual('<article xml:lang="pt" article-type="research-article"/>', ET.tostring(xml))
def test_xmlfront_pipe(self):
pxml = ET.Element('article')
data = [None, pxml]
xmlarticle = export_rsps.XMLFrontPipe()
raw, xml = xmlarticle.transform(data)
self.assertEqual('<article><front><journal-meta/><article-meta/></front></article>', ET.tostring(xml))
def test_xmljournal_id_pipe(self):
pxml = ET.Element('article')
pxml.append(ET.Element('front'))
front = pxml.find('front')
front.append(ET.Element('journal-meta'))
data = [self._article_meta, pxml]
xmlarticle = export_rsps.XMLJournalMetaJournalIdPipe()
raw, xml = xmlarticle.transform(data)
self.assertEqual('<article><front><journal-meta><journal-id journal-id-type="publisher-id">rsp</journal-id></journal-meta></front></article>', ET.tostring(xml))
def test_xmljournal_meta_journal_title_group_pipe(self):
pxml = ET.Element('article')
pxml.append(ET.Element('front'))
front = pxml.find('front')
front.append(ET.Element('journal-meta'))
data = [self._article_meta, pxml]
xmlarticle = export_rsps.XMLJournalMetaJournalTitleGroupPipe()
raw, xml = xmlarticle.transform(data)
title = xml.find('./front/journal-meta/journal-title-group/journal-title').text
self.assertEqual(u'Revista de Saúde Pública', title)
def test_xmljournal_meta_abbrev_journal_title_pipe(self):
pxml = ET.Element('article')
pxml.append(ET.Element('front'))
front = pxml.find('front')
front.append(ET.Element('journal-meta'))
data = [self._article_meta, pxml]
xmlarticle = export_rsps.XMLJournalMetaJournalTitleGroupPipe()
raw, xml = xmlarticle.transform(data)
abbrevtitle = xml.find('./front/journal-meta/journal-title-group/abbrev-journal-title').text
self.assertEqual(u'Rev. Saúde Pública', abbrevtitle)
def test_xmljournal_meta_abbrev_journal_title_pipe(self):
pxml = ET.Element('article')
pxml.append(ET.Element('front'))
front = pxml.find('front')
front.append(ET.Element('journal-meta'))
data = [self._article_meta, pxml]
xmlarticle = export_rsps.XMLJournalMetaJournalTitleGroupPipe()
raw, xml = xmlarticle.transform(data)
abbrevtype = xml.find('./front/journal-meta/journal-title-group/abbrev-journal-title').get('abbrev-type')
self.assertEqual(u'publisher', abbrevtype)
def test_xmljournal_meta_print_issn_pipe(self):
pxml = ET.Element('article')
pxml.append(ET.Element('front'))
front = pxml.find('front')
front.append(ET.Element('journal-meta'))
data = [self._article_meta, pxml]
xmlarticle = export_rsps.XMLJournalMetaISSNPipe()
raw, xml = xmlarticle.transform(data)
issn = xml.find('./front/journal-meta/issn[@pub-type="ppub"]').text
self.assertEqual(u'0034-8910', issn)
def test_xmljournal_meta_electronic_issn_pipe(self):
    """The ISSN pipe must emit an epub issn when v400 holds the value."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'journal-meta')

    # Override the fixture's current ISSN (title v400).
    self._article_meta.data['title']['v400'][0]['_'] = 'XXXX-XXXX'

    pipe = export_rsps.XMLJournalMetaISSNPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    issn = xml.find('./front/journal-meta/issn[@pub-type="epub"]').text
    self.assertEqual(u'XXXX-XXXX', issn)
def test_xmljournal_meta_publisher_pipe(self):
    """The publisher pipe must emit publisher-name and publisher-loc."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'journal-meta')

    pipe = export_rsps.XMLJournalMetaPublisherPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    publishername = xml.find('./front/journal-meta/publisher/publisher-name').text
    publisherloc = xml.find('./front/journal-meta/publisher/publisher-loc').text

    self.assertEqual(u'Faculdade de Saúde Pública da Universidade de São Paulo', publishername)
    self.assertEqual(u'São Paulo', publisherloc)
def test_xml_article_meta_article_id_publisher_pipe(self):
    """The pipe must emit the publisher-id article-id (the PID)."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaArticleIdPublisherPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    articleidpublisher = xml.find('./front/article-meta/article-id[@pub-id-type="publisher-id"]').text
    self.assertEqual(u'S0034-89102010000400007', articleidpublisher)
def test_xml_article_meta_article_id_doi_pipe(self):
    """The pipe must emit the DOI as an article-id."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaArticleIdDOIPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    doi = xml.find('./front/article-meta/article-id[@pub-id-type="doi"]').text
    self.assertEqual(u'10.1590/S0034-89102010000400007', doi)
def test_xml_article_meta_article_id_doi_without_data_pipe(self):
    """The precondition must skip the pipe when the article has no DOI.

    With an empty article record the preconditioned pipe must not append
    a DOI article-id, so the XPath lookup returns None.
    """
    fakexylosearticle = Article({'article': {}, 'title': {}})

    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaArticleIdDOIPipe()
    raw, xml = pipe.transform([fakexylosearticle, doc])

    # States the intent directly, replacing the original try/except
    # AttributeError "trick" that asserted via assertTrue(True/False).
    self.assertIsNone(
        xml.find('./front/article-meta/article-id[@pub-id-type="doi"]'))
def test_xmlarticle_meta_article_categories_pipe(self):
    """The pipe must emit the heading subject group with the WoS subject."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaArticleCategoriesPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    subjects = xml.findall(
        './front/article-meta/article-categories/subj-group[@subj-group-type="heading"]/subject')
    categories = [node.text for node in subjects]

    self.assertEqual([u'PUBLIC, ENVIRONMENTAL & OCCUPATIONAL HEALTH'], categories)
def test_xmlarticle_meta_article_categories_without_data_pipe(self):
    """Without subject data no article-categories subject must be added."""
    fake_article = Article({'article': {}, 'title': {}})

    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaArticleCategoriesPipe()
    raw, xml = pipe.transform([fake_article, doc])

    self.assertEqual(None, xml.find('./front/article-meta/article-categories/subj-group/subject'))
def test_xmlarticle_meta_title_group_pipe(self):
    """The pipe must emit the article title inside title-group."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaTitleGroupPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    title = xml.find('./front/article-meta/title-group/article-title').text
    self.assertEqual(u'Perfil epidemiológico dos pacientes em terapia renal substitutiva no Brasil, 2000-2004', title)
def test_xmlarticle_meta_translated_title_group_pipe(self):
    """The pipe must emit one trans-title-group per translated title."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    meta = ET.SubElement(front, 'article-meta')
    ET.SubElement(meta, 'title-group')

    pipe = export_rsps.XMLArticleMetaTranslatedTitleGroupPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    groups = xml.findall('./front/article-meta/title-group/trans-title-group')
    titles = [group.find('trans-title').text for group in groups]

    self.assertEqual([u'Epidemiological profile of patients on renal replacement therapy in Brazil, 2000-2004',
                      u'Perfil epidemiológico de los pacientes en terapia renal substitutiva en Brasil, 2000-2004'], titles)
def test_xmlarticle_meta_translated_title_group_without_data_pipe(self):
    """Without translated titles no trans-title-group must be created.

    Bug fix: the test instantiated XMLArticleMetaContribGroupPipe, so
    the translated-title pipe it is named after was never exercised; it
    now uses XMLArticleMetaTranslatedTitleGroupPipe, matching the
    positive test above. The expected result is unchanged.
    """
    fakexylosearticle = Article({'article': {}, 'title': {}})

    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    meta = ET.SubElement(front, 'article-meta')
    ET.SubElement(meta, 'title-group')

    pipe = export_rsps.XMLArticleMetaTranslatedTitleGroupPipe()
    raw, xml = pipe.transform([fakexylosearticle, doc])

    titles = [i.find('trans-title').text
              for i in xml.findall('./front/article-meta/title-group/trans-title-group')]
    self.assertEqual([], titles)
def test_xmlarticle_meta_contrib_group_author_names_pipe(self):
    """The pipe must emit given-names and surname for every author."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaContribGroupPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    names = xml.findall('./front/article-meta/contrib-group/contrib/name')
    fullnames = [' '.join([n.find('given-names').text, n.find('surname').text]) for n in names]

    self.assertEqual([u'Mariangela Leal Cherchiglia',
                      u'Elaine Leandro Machado',
                      u'Daniele Araújo Campo Szuster',
                      u'Eli Iola Gurgel Andrade',
                      u'Francisco de Assis Acúrcio',
                      u'Waleska Teixeira Caiaffa',
                      u'Ricardo Sesso',
                      u'Augusto A Guerra Junior',
                      u'Odilon Vanni de Queiroz',
                      u'Isabel Cristina Gomes'], fullnames)
def test_xmlarticle_meta_contrib_group_author_roles_pipe(self):
    """The pipe must emit one role element per author."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaContribGroupPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    roles = [node.text for node in xml.findall('./front/article-meta/contrib-group/contrib/role')]

    self.assertEqual([u'ND', u'ND', u'ND', u'ND', u'ND', u'ND', u'ND',
                      u'ND', u'ND', u'ND'], roles)
def test_xmlarticle_meta_contrib_group_author_xrefs_pipe(self):
    """The pipe must emit the affiliation xref rids for every author."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaContribGroupPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    rids = [node.get('rid') for node in xml.findall('./front/article-meta/contrib-group/contrib/xref')]

    self.assertEqual([u'aff01', u'aff01', u'aff01', u'aff01', u'aff01', u'aff01', u'aff02',
                      u'aff01', u'aff02', u'aff01', u'aff03'], rids)
def test_xmlarticle_meta_contrib_group_author_without_xrefs_pipe(self):
    """Author xrefs must still be produced after v71 is removed.

    Bug fix: a fresh Article was built from the modified raw JSON
    (without v71) but the unmodified fixture self._article_meta was
    passed to the pipe, so the modified record was never tested.
    """
    del(self._raw_json['article']['v71'])
    article_meta = Article(self._raw_json)

    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaContribGroupPipe()
    # Use the article built without v71 — previously self._article_meta.
    raw, xml = pipe.transform([article_meta, doc])

    rids = [i.get('rid') for i in xml.findall('./front/article-meta/contrib-group/contrib/xref')]

    # NOTE(review): expectation kept from the original test; confirm that
    # removing v71 really leaves the xref list unchanged.
    self.assertEqual([u'aff01', u'aff01', u'aff01', u'aff01', u'aff01', u'aff01', u'aff02',
                      u'aff01', u'aff02', u'aff01', u'aff03'], rids)
def test_xmlarticle_meta_contrib_group_without_data_pipe(self):
    """Without author data the pipe must add no contrib elements."""
    fake_article = Article({'article': {}, 'title': {}})

    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaContribGroupPipe()
    raw, xml = pipe.transform([fake_article, doc])

    contribs = xml.findall('./front/article-meta/contrib-group/contrib')
    titles = [node.find('contrib-group').text for node in contribs]

    self.assertEqual([], titles)
def test_xmlarticle_meta_affiliation_without_data_pipe(self):
    """Without affiliation data the pipe must add no aff elements."""
    fake_article = Article({'article': {}, 'title': {}})

    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaAffiliationPipe()
    raw, xml = pipe.transform([fake_article, doc])

    affiliations = [node.find('institution').text for node in xml.findall('./front/article-meta/aff')]

    self.assertEqual([], affiliations)
def test_xmlarticle_meta_affiliation_institution_pipe(self):
    """The pipe must emit an institution per affiliation."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaAffiliationPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    affiliations = [node.find('institution').text for node in xml.findall('./front/article-meta/aff')]

    self.assertEqual([u'Universidade Federal de Minas Gerais',
                      u'Universidade Federal de São Paulo',
                      u'Universidade Federal de Minas Gerais'], affiliations)
def test_xmlarticle_meta_affiliation_index_pipe(self):
    """The pipe must give each aff a sequential id (aff01, aff02, ...)."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaAffiliationPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    indexes = [node.get('id') for node in xml.findall('./front/article-meta/aff')]

    self.assertEqual([u'aff01',
                      u'aff02',
                      u'aff03'], indexes)
def test_xmlarticle_meta_affiliation_country_pipe(self):
    """The pipe must emit a country element per affiliation."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaAffiliationPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    countries = [node.find('country').text for node in xml.findall('./front/article-meta/aff')]

    self.assertEqual([u'BRAZIL',
                      u'BRAZIL',
                      u'BRAZIL'], countries)
def test_xmlarticle_meta_affiliation_address_pipe(self):
    """The pipe must emit an addr-line per affiliation."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaAffiliationPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    address = [node.find('addr-line').text for node in xml.findall('./front/article-meta/aff')]

    self.assertEqual([u'Belo Horizonte',
                      u'São Paulo',
                      u'Belo Horizonte'], address)
def test_xmlarticle_meta_general_info_pub_year_pipe(self):
    """The pipe must emit the publication year inside pub-date."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaGeneralInfoPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    pub_year = xml.find('./front/article-meta/pub-date[@pub-type="epub-ppub"]/year').text
    self.assertEqual(u'2010', pub_year)
def test_xmlarticle_meta_general_info_pub_year_online_pipe(self):
    """Publication year must be emitted for an online-only journal (v35=ONLIN).

    Bug fix: this method redefined (and silently shadowed)
    test_xmlarticle_meta_general_info_pub_year_pipe above, so only one
    of the two checks ever ran; renamed so both are collected.
    """
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    # Mark the journal as online-only (title v35).
    self._article_meta.data['title']['v35'][0]['_'] = 'ONLIN'

    pipe = export_rsps.XMLArticleMetaGeneralInfoPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    pub_year = xml.find('./front/article-meta/pub-date[@pub-type="epub-ppub"]/year').text
    self.assertEqual(u'2010', pub_year)
def test_xmlarticle_meta_general_info_pub_month_pipe(self):
    """The pipe must emit the publication month inside pub-date."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaGeneralInfoPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    pub_month = xml.find('./front/article-meta/pub-date/month').text
    self.assertEqual(u'08', pub_month)
def test_xmlarticle_meta_general_info_first_page_pipe(self):
    """The pipe must emit fpage with the article's first page."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaGeneralInfoPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    fpage = xml.find('./front/article-meta/fpage').text
    self.assertEqual(u'639', fpage)
def test_xmlarticle_meta_general_info_without_first_page_pipe(self):
    """Without page data no fpage element must be created."""
    fake_article = Article({'article': {'v65': [{'_': '201008'}]}, 'title': {}})

    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaGeneralInfoPipe()
    raw, xml = pipe.transform([fake_article, doc])

    self.assertEqual(None, xml.find('./front/article-meta/fpage'))
def test_xmlarticle_meta_general_info_last_page_pipe(self):
    """The pipe must emit lpage with the article's last page."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaGeneralInfoPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    lpage = xml.find('./front/article-meta/lpage').text
    self.assertEqual(u'649', lpage)
def test_xmlarticle_meta_general_info_without_last_page_pipe(self):
    """Without page data no lpage element must be created."""
    fake_article = Article({'article': {'v65': [{'_': '201008'}]}, 'title': {}})

    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaGeneralInfoPipe()
    raw, xml = pipe.transform([fake_article, doc])

    self.assertEqual(None, xml.find('./front/article-meta/lpage'))
def test_xmlarticle_meta_general_info_volume_pipe(self):
    """The pipe must emit the issue volume."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaGeneralInfoPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    volume = xml.find('./front/article-meta/volume').text
    self.assertEqual(u'44', volume)
def test_xmlarticle_meta_general_info_without_volume_pipe(self):
    """Without issue data no volume element must be created."""
    fake_article = Article({'article': {'v65': [{'_': '201008'}]}, 'title': {}})

    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaGeneralInfoPipe()
    raw, xml = pipe.transform([fake_article, doc])

    self.assertEqual(None, xml.find('./front/article-meta/volume'))
def test_xmlarticle_meta_general_info_issue_pipe(self):
    """The pipe must emit the issue number."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaGeneralInfoPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    issue = xml.find('./front/article-meta/issue').text
    self.assertEqual(u'4', issue)
def test_xmlarticle_meta_general_info_without_issue_pipe(self):
    """Without issue data no issue element must be created."""
    fake_article = Article({'article': {'v65': [{'_': '201008'}]}, 'title': {}})

    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaGeneralInfoPipe()
    raw, xml = pipe.transform([fake_article, doc])

    self.assertEqual(None, xml.find('./front/article-meta/issue'))
def test_xmlarticle_meta_original_language_abstract_pipe(self):
    """The pipe must emit the original-language abstract."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaAbstractsPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    # Only the first 30 characters are compared.
    abstract = xml.find('./front/article-meta/abstract/p').text[0:30]
    self.assertEqual(u'OBJETIVO: Descrever o perfil e', abstract)
def test_xmlarticle_meta_original_language_abstract_without_data_pipe(self):
    """Without abstract data no abstract paragraph must be created."""
    fake_article = Article({'article': {'v40': [{'_': 'pt'}]}, 'title': {}})

    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaAbstractsPipe()
    raw, xml = pipe.transform([fake_article, doc])

    self.assertEqual(None, xml.find('./front/article-meta/abstract/p'))
def test_xmlarticle_meta_translated_abstract_without_data_pipe(self):
    """Without translated abstracts no trans-abstract must be created."""
    fake_article = Article({'article': {'v40': [{'_': 'pt'}]}, 'title': {}})

    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaAbstractsPipe()
    raw, xml = pipe.transform([fake_article, doc])

    self.assertEqual(None, xml.find('./front/article-meta/trans-abstract/p'))
def test_xmlarticle_meta_keywords_without_data_pipe(self):
    """Without keyword data no kwd-group must be created."""
    fake_article = Article({'article': {'v40': [{'_': 'pt'}]}, 'title': {}})

    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaKeywordsPipe()
    raw, xml = pipe.transform([fake_article, doc])

    self.assertEqual(None, xml.find('./front/article-meta/kwd-group'))
def test_xmlarticle_meta_keywords_languages_data_pipe(self):
    """Each kwd-group must carry its language as an xml:lang attribute."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaKeywordsPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    langs = [group.get('{http://www.w3.org/XML/1998/namespace}lang')
             for group in xml.findall('./front/article-meta/kwd-group')]

    self.assertEqual([u'en', u'es', u'pt'], langs)
def test_xmlarticle_meta_keywords_pipe(self):
    """The pipe must emit every keyword in every language, in order."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaKeywordsPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    keywords = [node.text for node in xml.findall('.//kwd')]

    self.assertEqual([u'Renal Insufficiency, Chronic',
                      u'Renal Replacement Therapy',
                      u'Hospital Information Systems',
                      u'Mortality Registries',
                      u'Insuficiencia Renal Crónica',
                      u'Terapia de Reemplazo Renal',
                      u'Sistemas de Información en Hospital',
                      u'Registros de Mortalidad',
                      u'Insuficiência Renal Crônica',
                      u'Terapia de Substituição Renal',
                      u'Sistemas de Informação Hospitalar',
                      u'Registros de Mortalidade'], keywords)
def test_xml_article_meta_counts_citations_pipe(self):
    """ref-count must hold the number of citations."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaCountsPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    count = xml.find('./front/article-meta/counts/ref-count').get('count')
    self.assertEqual(23, int(count))
def test_xml_article_meta_counts_pages_pipe(self):
    """page-count must hold the page range size (649 - 639 = 10)."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaCountsPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    count = xml.find('./front/article-meta/counts/page-count').get('count')
    self.assertEqual(10, int(count))
def test_xml_article_meta_counts_pages_invalid_pages_pipe(self):
    """Non-numeric page values must yield a page-count of 0."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    # Corrupt both page fields (article v14 f/l).
    self._article_meta.data['article']['v14'][0]['l'] = 'invalidpage'
    self._article_meta.data['article']['v14'][0]['f'] = 'invalidpage'

    pipe = export_rsps.XMLArticleMetaCountsPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    count = xml.find('./front/article-meta/counts/page-count').get('count')
    self.assertEqual(0, int(count))
def test_xml_article_meta_counts_pages_invalid_pages_first_gt_last_pipe(self):
    """A first page greater than the last page must yield page-count 0."""
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    # first page (110) > last page (100)
    self._article_meta.data['article']['v14'][0]['l'] = '100'
    self._article_meta.data['article']['v14'][0]['f'] = '110'

    pipe = export_rsps.XMLArticleMetaCountsPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    count = xml.find('./front/article-meta/counts/page-count').get('count')
    self.assertEqual(0, int(count))
def test_xml_article_meta_permission_pipe(self):
    """The permission pipe must not emit an open-access license here.

    Bug fix: the original XPath misspelled 'article-meta' as
    'articlemeta' and 'license' as 'lincense', so it matched nothing
    regardless of the pipe's output and the assertion was vacuous.
    """
    doc = ET.Element('article')
    front = ET.SubElement(doc, 'front')
    ET.SubElement(front, 'article-meta')

    pipe = export_rsps.XMLArticleMetaPermissionPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    license_node = xml.find(
        './front/article-meta/permissions/license[@license-type="open-access"]')
    # NOTE(review): expectation kept from the original; if the pipe does
    # emit an open-access license for this fixture, update this to assert
    # the license content instead.
    self.assertEqual(None, license_node)
def test_xml_citations_without_data_pipe(self):
    """Without citation data the citations pipe must add no ref elements.

    Bug fixes: the 'citations' key was misspelled 'citatons'; the test
    instantiated XMLArticleMetaKeywordsPipe instead of the citations
    pipe it is named after; and the XPath was rooted at './article'
    although the context node already is the article element.
    """
    fakexylosearticle = Article({'article': {}, 'title': {}, 'citations': {}})

    doc = ET.Element('article')
    back = ET.SubElement(doc, 'back')
    ET.SubElement(back, 'ref-list')

    pipe = export_rsps.XMLArticleMetaCitationsPipe()
    raw, xml = pipe.transform([fakexylosearticle, doc])

    self.assertEqual(None, xml.find('./back/ref-list/ref'))
def test_xml_citations_count_pipe(self):
    """The citations pipe must emit one ref element per citation."""
    doc = ET.Element('article')
    back = ET.SubElement(doc, 'back')
    ET.SubElement(back, 'ref-list')

    pipe = export_rsps.XMLArticleMetaCitationsPipe()
    raw, xml = pipe.transform([self._article_meta, doc])

    self.assertEqual(23, len(xml.findall('./back/ref-list/ref')))
| 31.813936
| 169
| 0.625463
| 4,718
| 42,917
| 5.539423
| 0.085841
| 0.066807
| 0.068873
| 0.050163
| 0.847446
| 0.828161
| 0.791123
| 0.747427
| 0.709853
| 0.69493
| 0
| 0.009679
| 0.236829
| 42,917
| 1,348
| 170
| 31.837537
| 0.788264
| 0.00459
| 0
| 0.667503
| 0
| 0.006274
| 0.171981
| 0.071405
| 0
| 0
| 0
| 0
| 0.104141
| 1
| 0.100376
| false
| 0
| 0.010038
| 0
| 0.112923
| 0.001255
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
db8e0ef52e20fc2dd30566d28670aa27da377c4b
| 69
|
py
|
Python
|
deepmachine/data/builder/__init__.py
|
yuxiang-zhou/deepmachine
|
b8a64354f7d37664172ef79a66b1fc0a9fa0f493
|
[
"MIT"
] | 1
|
2018-09-04T11:12:11.000Z
|
2018-09-04T11:12:11.000Z
|
deepmachine/data/builder/__init__.py
|
yuxiang-zhou/deepmachine
|
b8a64354f7d37664172ef79a66b1fc0a9fa0f493
|
[
"MIT"
] | null | null | null |
deepmachine/data/builder/__init__.py
|
yuxiang-zhou/deepmachine
|
b8a64354f7d37664172ef79a66b1fc0a9fa0f493
|
[
"MIT"
] | null | null | null |
from .base import *
from .builder import *
from . import db_iterator
| 17.25
| 25
| 0.753623
| 10
| 69
| 5.1
| 0.6
| 0.392157
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 69
| 3
| 26
| 23
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dba0e803e5f9a6f087356d1bb08baad5b5a4c580
| 482
|
py
|
Python
|
runway/commands/__init__.py
|
rgitzel/runway
|
bd759009a479544760ba9f68eb38de1976fd1d27
|
[
"Apache-2.0"
] | null | null | null |
runway/commands/__init__.py
|
rgitzel/runway
|
bd759009a479544760ba9f68eb38de1976fd1d27
|
[
"Apache-2.0"
] | null | null | null |
runway/commands/__init__.py
|
rgitzel/runway
|
bd759009a479544760ba9f68eb38de1976fd1d27
|
[
"Apache-2.0"
] | null | null | null |
"""Collect all the command classes together."""
from .runway import gen_sample # noqa
from .runway import gitclean # noqa
from .runway import init # noqa
from .runway import preflight # noqa
from .runway import test # noqa
from .runway import whichenv # noqa
from .modules import deploy # noqa
from .modules import destroy # noqa
from .modules import dismantle # noqa
from .modules import plan # noqa
from .modules import takeoff # noqa
from .modules import taxi # noqa
| 30.125
| 47
| 0.748963
| 67
| 482
| 5.373134
| 0.358209
| 0.244444
| 0.266667
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186722
| 482
| 15
| 48
| 32.133333
| 0.918367
| 0.211618
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dba874fc59c152379ee105129863b15cb5fb3415
| 14,826
|
py
|
Python
|
machine/qemu/sources/u-boot/test/py/tests/test_fs/test_ext.py
|
muddessir/framework
|
5b802b2dd7ec9778794b078e748dd1f989547265
|
[
"MIT"
] | 1
|
2021-11-21T19:56:29.000Z
|
2021-11-21T19:56:29.000Z
|
machine/qemu/sources/u-boot/test/py/tests/test_fs/test_ext.py
|
muddessir/framework
|
5b802b2dd7ec9778794b078e748dd1f989547265
|
[
"MIT"
] | null | null | null |
machine/qemu/sources/u-boot/test/py/tests/test_fs/test_ext.py
|
muddessir/framework
|
5b802b2dd7ec9778794b078e748dd1f989547265
|
[
"MIT"
] | null | null | null |
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2018, Linaro Limited
# Author: Takahiro Akashi <takahiro.akashi@linaro.org>
#
# U-Boot File System:Exntented Test
"""
This test verifies extended write operation on file system.
"""
import pytest
import re
from fstest_defs import *
from fstest_helpers import assert_fs_integrity
@pytest.mark.boardspec('sandbox')
@pytest.mark.slow
class TestFsExt(object):
def test_fs_ext1(self, u_boot_console, fs_obj_ext):
    """
    Test Case 1 - write a file with absolute path

    u_boot_console: console fixture used to run U-Boot commands.
    fs_obj_ext: (fs_type, fs_img, md5val) - filesystem type, path to
        the filesystem image, and the list of expected md5 sums.
    """
    fs_type,fs_img,md5val = fs_obj_ext
    with u_boot_console.log.section('Test Case 1 - write with abs path'):
        # Test Case 1a - Check if command successfully returned
        output = u_boot_console.run_command_list([
            'host bind 0 %s' % fs_img,
            '%sload host 0:0 %x /%s' % (fs_type, ADDR, MIN_FILE),
            '%swrite host 0:0 %x /dir1/%s.w1 $filesize'
            % (fs_type, ADDR, MIN_FILE)])
        assert('20480 bytes written' in ''.join(output))

        # Test Case 1b - Check md5 of file content
        # mw.b clears the buffer first so a failed load can't pass the check.
        output = u_boot_console.run_command_list([
            'mw.b %x 00 100' % ADDR,
            '%sload host 0:0 %x /dir1/%s.w1' % (fs_type, ADDR, MIN_FILE),
            'md5sum %x $filesize' % ADDR,
            'setenv filesize'])
        assert(md5val[0] in ''.join(output))
        assert_fs_integrity(fs_type, fs_img)
def test_fs_ext2(self, u_boot_console, fs_obj_ext):
    """
    Test Case 2 - write to a file with relative path

    Same as Test Case 1 but the destination path has no leading '/'.
    """
    fs_type,fs_img,md5val = fs_obj_ext
    with u_boot_console.log.section('Test Case 2 - write with rel path'):
        # Test Case 2a - Check if command successfully returned
        output = u_boot_console.run_command_list([
            'host bind 0 %s' % fs_img,
            '%sload host 0:0 %x /%s' % (fs_type, ADDR, MIN_FILE),
            '%swrite host 0:0 %x dir1/%s.w2 $filesize'
            % (fs_type, ADDR, MIN_FILE)])
        assert('20480 bytes written' in ''.join(output))

        # Test Case 2b - Check md5 of file content
        output = u_boot_console.run_command_list([
            'mw.b %x 00 100' % ADDR,
            '%sload host 0:0 %x dir1/%s.w2' % (fs_type, ADDR, MIN_FILE),
            'md5sum %x $filesize' % ADDR,
            'setenv filesize'])
        assert(md5val[0] in ''.join(output))
        assert_fs_integrity(fs_type, fs_img)
def test_fs_ext3(self, u_boot_console, fs_obj_ext):
    """
    Test Case 3 - write to a file with invalid path

    Writing under a non-existent directory must fail with an error.
    """
    fs_type,fs_img,md5val = fs_obj_ext
    with u_boot_console.log.section('Test Case 3 - write with invalid path'):
        # Test Case 3 - Check if command expectedly failed
        output = u_boot_console.run_command_list([
            'host bind 0 %s' % fs_img,
            '%sload host 0:0 %x /%s' % (fs_type, ADDR, MIN_FILE),
            '%swrite host 0:0 %x /dir1/none/%s.w3 $filesize'
            % (fs_type, ADDR, MIN_FILE)])
        assert('Unable to write file /dir1/none/' in ''.join(output))
        assert_fs_integrity(fs_type, fs_img)
def test_fs_ext4(self, u_boot_console, fs_obj_ext):
    """
    Test Case 4 - write at non-zero offset, enlarging file size

    Rewrites the whole file at offset 0x1400 (5120), so the file grows
    to 0x6400 bytes; checks size and md5val[1].
    """
    fs_type,fs_img,md5val = fs_obj_ext
    with u_boot_console.log.section('Test Case 4 - write at non-zero offset, enlarging file size'):
        # Test Case 4a - Check if command successfully returned
        output = u_boot_console.run_command_list([
            'host bind 0 %s' % fs_img,
            '%sload host 0:0 %x /%s' % (fs_type, ADDR, MIN_FILE),
            '%swrite host 0:0 %x /dir1/%s.w4 $filesize'
            % (fs_type, ADDR, MIN_FILE)])
        output = u_boot_console.run_command(
            '%swrite host 0:0 %x /dir1/%s.w4 $filesize 0x1400'
            % (fs_type, ADDR, MIN_FILE))
        assert('20480 bytes written' in output)

        # Test Case 4b - Check size of written file
        output = u_boot_console.run_command_list([
            '%ssize host 0:0 /dir1/%s.w4' % (fs_type, MIN_FILE),
            'printenv filesize',
            'setenv filesize'])
        assert('filesize=6400' in ''.join(output))

        # Test Case 4c - Check md5 of file content
        output = u_boot_console.run_command_list([
            'mw.b %x 00 100' % ADDR,
            '%sload host 0:0 %x /dir1/%s.w4' % (fs_type, ADDR, MIN_FILE),
            'md5sum %x $filesize' % ADDR,
            'setenv filesize'])
        assert(md5val[1] in ''.join(output))
        assert_fs_integrity(fs_type, fs_img)
def test_fs_ext5(self, u_boot_console, fs_obj_ext):
    """
    Test Case 5 - write at non-zero offset, shrinking file size

    Writes 0x1400 bytes at offset 0x1400, truncating the file to
    0x2800 bytes; checks size and md5val[2].
    """
    fs_type,fs_img,md5val = fs_obj_ext
    with u_boot_console.log.section('Test Case 5 - write at non-zero offset, shrinking file size'):
        # Test Case 5a - Check if command successfully returned
        output = u_boot_console.run_command_list([
            'host bind 0 %s' % fs_img,
            '%sload host 0:0 %x /%s' % (fs_type, ADDR, MIN_FILE),
            '%swrite host 0:0 %x /dir1/%s.w5 $filesize'
            % (fs_type, ADDR, MIN_FILE)])
        output = u_boot_console.run_command(
            '%swrite host 0:0 %x /dir1/%s.w5 0x1400 0x1400'
            % (fs_type, ADDR, MIN_FILE))
        assert('5120 bytes written' in output)

        # Test Case 5b - Check size of written file
        output = u_boot_console.run_command_list([
            '%ssize host 0:0 /dir1/%s.w5' % (fs_type, MIN_FILE),
            'printenv filesize',
            'setenv filesize'])
        assert('filesize=2800' in ''.join(output))

        # Test Case 5c - Check md5 of file content
        output = u_boot_console.run_command_list([
            'mw.b %x 00 100' % ADDR,
            '%sload host 0:0 %x /dir1/%s.w5' % (fs_type, ADDR, MIN_FILE),
            'md5sum %x $filesize' % ADDR,
            'setenv filesize'])
        assert(md5val[2] in ''.join(output))
        assert_fs_integrity(fs_type, fs_img)
def test_fs_ext6(self, u_boot_console, fs_obj_ext):
    """
    Test Case 6 - write nothing at the start, truncating to zero

    A zero-length write at offset 0 must truncate the file to size 0.
    """
    fs_type,fs_img,md5val = fs_obj_ext
    with u_boot_console.log.section('Test Case 6 - write nothing at the start, truncating to zero'):
        # Test Case 6a - Check if command successfully returned
        output = u_boot_console.run_command_list([
            'host bind 0 %s' % fs_img,
            '%sload host 0:0 %x /%s' % (fs_type, ADDR, MIN_FILE),
            '%swrite host 0:0 %x /dir1/%s.w6 $filesize'
            % (fs_type, ADDR, MIN_FILE)])
        output = u_boot_console.run_command(
            '%swrite host 0:0 %x /dir1/%s.w6 0 0'
            % (fs_type, ADDR, MIN_FILE))
        assert('0 bytes written' in output)

        # Test Case 6b - Check size of written file
        output = u_boot_console.run_command_list([
            '%ssize host 0:0 /dir1/%s.w6' % (fs_type, MIN_FILE),
            'printenv filesize',
            'setenv filesize'])
        assert('filesize=0' in ''.join(output))
        assert_fs_integrity(fs_type, fs_img)
def test_fs_ext7(self, u_boot_console, fs_obj_ext):
"""
Test Case 7 - write at the end (append)
"""
fs_type,fs_img,md5val = fs_obj_ext
with u_boot_console.log.section('Test Case 7 - write at the end (append)'):
# Test Case 7a - Check if command successfully returned
output = u_boot_console.run_command_list([
'host bind 0 %s' % fs_img,
'%sload host 0:0 %x /%s' % (fs_type, ADDR, MIN_FILE),
'%swrite host 0:0 %x /dir1/%s.w7 $filesize'
% (fs_type, ADDR, MIN_FILE)])
output = u_boot_console.run_command(
'%swrite host 0:0 %x /dir1/%s.w7 $filesize $filesize'
% (fs_type, ADDR, MIN_FILE))
assert('20480 bytes written' in output)
# Test Case 7b - Check size of written file
output = u_boot_console.run_command_list([
'%ssize host 0:0 /dir1/%s.w7' % (fs_type, MIN_FILE),
'printenv filesize',
'setenv filesize'])
assert('filesize=a000' in ''.join(output))
# Test Case 7c - Check md5 of file content
output = u_boot_console.run_command_list([
'mw.b %x 00 100' % ADDR,
'%sload host 0:0 %x /dir1/%s.w7' % (fs_type, ADDR, MIN_FILE),
'md5sum %x $filesize' % ADDR,
'setenv filesize'])
assert(md5val[3] in ''.join(output))
assert_fs_integrity(fs_type, fs_img)
def test_fs_ext8(self, u_boot_console, fs_obj_ext):
"""
Test Case 8 - write at offset beyond the end of file
"""
fs_type,fs_img,md5val = fs_obj_ext
with u_boot_console.log.section('Test Case 8 - write beyond the end'):
# Test Case 8a - Check if command expectedly failed
output = u_boot_console.run_command_list([
'host bind 0 %s' % fs_img,
'%sload host 0:0 %x /%s' % (fs_type, ADDR, MIN_FILE),
'%swrite host 0:0 %x /dir1/%s.w8 $filesize'
% (fs_type, ADDR, MIN_FILE)])
output = u_boot_console.run_command(
'%swrite host 0:0 %x /dir1/%s.w8 0x1400 %x'
% (fs_type, ADDR, MIN_FILE, 0x100000 + 0x1400))
assert('Unable to write file /dir1' in output)
assert_fs_integrity(fs_type, fs_img)
def test_fs_ext9(self, u_boot_console, fs_obj_ext):
"""
Test Case 9 - write to a non-existing file at non-zero offset
"""
fs_type,fs_img,md5val = fs_obj_ext
with u_boot_console.log.section('Test Case 9 - write to non-existing file with non-zero offset'):
# Test Case 9a - Check if command expectedly failed
output = u_boot_console.run_command_list([
'host bind 0 %s' % fs_img,
'%sload host 0:0 %x /%s' % (fs_type, ADDR, MIN_FILE),
'%swrite host 0:0 %x /dir1/%s.w9 0x1400 0x1400'
% (fs_type, ADDR, MIN_FILE)])
assert('Unable to write file /dir1' in ''.join(output))
assert_fs_integrity(fs_type, fs_img)
def test_fs_ext10(self, u_boot_console, fs_obj_ext):
"""
'Test Case 10 - create/delete as many directories under root directory
as amount of directory entries goes beyond one cluster size)'
"""
fs_type,fs_img,md5val = fs_obj_ext
with u_boot_console.log.section('Test Case 10 - create/delete (many)'):
# Test Case 10a - Create many files
# Please note that the size of directory entry is 32 bytes.
# So one typical cluster may holds 64 (2048/32) entries.
output = u_boot_console.run_command(
'host bind 0 %s' % fs_img)
for i in range(0, 66):
output = u_boot_console.run_command(
'%swrite host 0:0 %x /FILE0123456789_%02x 100'
% (fs_type, ADDR, i))
output = u_boot_console.run_command('%sls host 0:0 /' % fs_type)
assert('FILE0123456789_00' in output)
assert('FILE0123456789_41' in output)
# Test Case 10b - Delete many files
for i in range(0, 66):
output = u_boot_console.run_command(
'%srm host 0:0 /FILE0123456789_%02x'
% (fs_type, i))
output = u_boot_console.run_command('%sls host 0:0 /' % fs_type)
assert(not 'FILE0123456789_00' in output)
assert(not 'FILE0123456789_41' in output)
# Test Case 10c - Create many files again
# Please note no.64 and 65 are intentionally re-created
for i in range(64, 128):
output = u_boot_console.run_command(
'%swrite host 0:0 %x /FILE0123456789_%02x 100'
% (fs_type, ADDR, i))
output = u_boot_console.run_command('%sls host 0:0 /' % fs_type)
assert('FILE0123456789_40' in output)
assert('FILE0123456789_79' in output)
assert_fs_integrity(fs_type, fs_img)
def test_fs_ext11(self, u_boot_console, fs_obj_ext):
"""
'Test Case 11 - create/delete as many directories under non-root
directory as amount of directory entries goes beyond one cluster size)'
"""
fs_type,fs_img,md5val = fs_obj_ext
with u_boot_console.log.section('Test Case 11 - create/delete (many)'):
# Test Case 11a - Create many files
# Please note that the size of directory entry is 32 bytes.
# So one typical cluster may holds 64 (2048/32) entries.
output = u_boot_console.run_command(
'host bind 0 %s' % fs_img)
for i in range(0, 66):
output = u_boot_console.run_command(
'%swrite host 0:0 %x /dir1/FILE0123456789_%02x 100'
% (fs_type, ADDR, i))
output = u_boot_console.run_command('%sls host 0:0 /dir1' % fs_type)
assert('FILE0123456789_00' in output)
assert('FILE0123456789_41' in output)
# Test Case 11b - Delete many files
for i in range(0, 66):
output = u_boot_console.run_command(
'%srm host 0:0 /dir1/FILE0123456789_%02x'
% (fs_type, i))
output = u_boot_console.run_command('%sls host 0:0 /dir1' % fs_type)
assert(not 'FILE0123456789_00' in output)
assert(not 'FILE0123456789_41' in output)
# Test Case 11c - Create many files again
# Please note no.64 and 65 are intentionally re-created
for i in range(64, 128):
output = u_boot_console.run_command(
'%swrite host 0:0 %x /dir1/FILE0123456789_%02x 100'
% (fs_type, ADDR, i))
output = u_boot_console.run_command('%sls host 0:0 /dir1' % fs_type)
assert('FILE0123456789_40' in output)
assert('FILE0123456789_79' in output)
assert_fs_integrity(fs_type, fs_img)
| 46.33125
| 105
| 0.558276
| 2,006
| 14,826
| 3.919741
| 0.110668
| 0.050362
| 0.090042
| 0.084701
| 0.902455
| 0.878672
| 0.859977
| 0.852982
| 0.840392
| 0.795625
| 0
| 0.065634
| 0.33819
| 14,826
| 319
| 106
| 46.476489
| 0.735732
| 0.160124
| 0
| 0.689815
| 0
| 0
| 0.236636
| 0.006177
| 0
| 0
| 0.004118
| 0
| 0.194444
| 1
| 0.050926
| false
| 0
| 0.018519
| 0
| 0.074074
| 0.018519
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dbb639e2fd4cf68a4aad7b5044ff4d71fb8ef7dd
| 172
|
py
|
Python
|
artworks/admin.py
|
chschtsch/kiuss
|
4c2114fd777a89b79b5620d8d1b596b657d26328
|
[
"MIT"
] | 1
|
2016-01-05T15:11:26.000Z
|
2016-01-05T15:11:26.000Z
|
artworks/admin.py
|
malerstudio/kiuss
|
4c2114fd777a89b79b5620d8d1b596b657d26328
|
[
"MIT"
] | null | null | null |
artworks/admin.py
|
malerstudio/kiuss
|
4c2114fd777a89b79b5620d8d1b596b657d26328
|
[
"MIT"
] | null | null | null |
"""Register the gallery models with the Django admin site."""
from django.contrib import admin

# Explicit imports instead of `from .models import *` (PEP 8 discourages
# wildcard imports): only the models actually registered below are needed.
from .models import Artist, Artwork, Category, Project

# The default ModelAdmin is sufficient for each model; registering them
# makes them editable through the admin interface.
admin.site.register(Artwork)
admin.site.register(Category)
admin.site.register(Artist)
admin.site.register(Project)
| 21.5
| 32
| 0.813953
| 24
| 172
| 5.833333
| 0.5
| 0.257143
| 0.485714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075581
| 172
| 7
| 33
| 24.571429
| 0.880503
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
dbbd0f94d0ff6421fe1d26c1e2249a3e72a92068
| 281
|
py
|
Python
|
tests/test_ai_2048_1.py
|
Dratui/AI-Arena
|
e9693e34a90523bbb86eb2ad3b2c3e9797beed5c
|
[
"MIT"
] | 2
|
2018-11-16T08:18:42.000Z
|
2018-11-22T08:44:10.000Z
|
tests/test_ai_2048_1.py
|
Dratui/2048_online
|
e9693e34a90523bbb86eb2ad3b2c3e9797beed5c
|
[
"MIT"
] | 15
|
2018-11-16T10:52:24.000Z
|
2018-11-23T08:36:17.000Z
|
tests/test_ai_2048_1.py
|
Dratui/AI-Arena
|
e9693e34a90523bbb86eb2ad3b2c3e9797beed5c
|
[
"MIT"
] | 2
|
2018-11-15T09:32:36.000Z
|
2018-11-16T08:56:54.000Z
|
import ai.ai_2048_1 as AI
from pytest import *
import src.games.games as Games
def test_ai_output():
    """The 2048 AI must return a legal move index (0..3) for a simple board."""
    board = [
        [None, None, None, None],
        [None, None, None, None],
        [None, None, None, None],
        [2, None, None, 2],
    ]
    game = Games.init_game("2048")
    assert AI.ai_output(board, game) in [0, 1, 2, 3]
| 31.222222
| 142
| 0.66548
| 51
| 281
| 3.54902
| 0.392157
| 0.530387
| 0.662983
| 0.79558
| 0.265193
| 0.265193
| 0.265193
| 0.265193
| 0.265193
| 0.265193
| 0
| 0.064103
| 0.16726
| 281
| 8
| 143
| 35.125
| 0.709402
| 0
| 0
| 0
| 0
| 0
| 0.014286
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.166667
| false
| 0
| 0.5
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9171d12092b2982c16efc94c575d4b0a8958f895
| 37,889
|
py
|
Python
|
instances/passenger_demand/pas-20210421-2109-int14000000000000001e/18.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int14000000000000001e/18.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int14000000000000001e/18.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
"""
PASSENGERS
"""
numPassengers = 3191
passenger_arriving = (
(5, 8, 7, 5, 1, 0, 3, 5, 8, 3, 0, 0), # 0
(2, 8, 13, 5, 0, 0, 12, 10, 6, 1, 0, 0), # 1
(1, 10, 5, 4, 2, 0, 13, 13, 5, 2, 3, 0), # 2
(5, 11, 16, 2, 3, 0, 8, 0, 6, 3, 4, 0), # 3
(5, 8, 6, 2, 1, 0, 7, 10, 6, 5, 1, 0), # 4
(1, 10, 8, 5, 1, 0, 3, 10, 4, 2, 2, 0), # 5
(6, 8, 4, 5, 2, 0, 6, 6, 4, 4, 2, 0), # 6
(5, 9, 5, 2, 1, 0, 10, 12, 2, 5, 3, 0), # 7
(6, 3, 5, 3, 1, 0, 8, 7, 3, 6, 0, 0), # 8
(5, 7, 5, 5, 2, 0, 6, 6, 5, 4, 2, 0), # 9
(5, 11, 8, 3, 0, 0, 5, 8, 5, 4, 1, 0), # 10
(7, 5, 9, 6, 3, 0, 4, 5, 11, 6, 2, 0), # 11
(6, 7, 11, 2, 1, 0, 7, 6, 14, 8, 1, 0), # 12
(3, 6, 10, 4, 2, 0, 10, 9, 9, 2, 3, 0), # 13
(2, 8, 3, 4, 4, 0, 6, 12, 1, 2, 2, 0), # 14
(4, 12, 10, 7, 1, 0, 5, 16, 9, 7, 1, 0), # 15
(2, 11, 5, 1, 1, 0, 8, 9, 5, 6, 0, 0), # 16
(0, 10, 9, 4, 5, 0, 4, 8, 3, 4, 1, 0), # 17
(2, 7, 6, 3, 3, 0, 5, 12, 7, 6, 5, 0), # 18
(5, 8, 4, 5, 1, 0, 11, 9, 4, 3, 2, 0), # 19
(0, 9, 4, 2, 1, 0, 9, 13, 5, 3, 2, 0), # 20
(2, 8, 7, 2, 3, 0, 3, 11, 9, 2, 4, 0), # 21
(4, 11, 8, 5, 1, 0, 7, 8, 4, 6, 3, 0), # 22
(3, 4, 6, 0, 3, 0, 4, 11, 4, 7, 0, 0), # 23
(6, 14, 9, 3, 1, 0, 5, 5, 9, 6, 5, 0), # 24
(4, 8, 4, 7, 5, 0, 8, 6, 7, 3, 2, 0), # 25
(4, 8, 14, 2, 7, 0, 8, 9, 9, 4, 5, 0), # 26
(3, 5, 9, 8, 1, 0, 10, 6, 7, 5, 3, 0), # 27
(5, 13, 5, 3, 3, 0, 4, 10, 10, 4, 1, 0), # 28
(5, 8, 7, 3, 5, 0, 10, 10, 3, 0, 0, 0), # 29
(1, 10, 3, 3, 4, 0, 10, 10, 3, 7, 3, 0), # 30
(8, 6, 10, 5, 0, 0, 6, 9, 4, 4, 1, 0), # 31
(3, 10, 9, 2, 1, 0, 8, 10, 3, 5, 2, 0), # 32
(4, 9, 5, 1, 3, 0, 3, 7, 6, 6, 2, 0), # 33
(7, 6, 5, 6, 4, 0, 10, 9, 8, 2, 1, 0), # 34
(6, 6, 6, 5, 2, 0, 4, 5, 5, 8, 2, 0), # 35
(3, 15, 7, 4, 4, 0, 7, 8, 10, 5, 4, 0), # 36
(4, 15, 11, 4, 2, 0, 9, 11, 3, 9, 4, 0), # 37
(3, 13, 5, 3, 4, 0, 8, 8, 3, 6, 3, 0), # 38
(2, 5, 8, 1, 0, 0, 5, 7, 3, 8, 5, 0), # 39
(6, 8, 11, 5, 3, 0, 10, 13, 4, 4, 1, 0), # 40
(4, 9, 6, 6, 0, 0, 6, 9, 3, 1, 0, 0), # 41
(2, 10, 8, 3, 4, 0, 10, 7, 5, 1, 0, 0), # 42
(4, 17, 7, 3, 3, 0, 7, 6, 6, 3, 1, 0), # 43
(6, 13, 3, 2, 2, 0, 5, 11, 7, 5, 4, 0), # 44
(3, 11, 6, 2, 2, 0, 3, 5, 5, 3, 2, 0), # 45
(7, 8, 9, 4, 0, 0, 10, 8, 6, 5, 3, 0), # 46
(3, 10, 10, 8, 2, 0, 6, 10, 7, 7, 3, 0), # 47
(4, 10, 5, 8, 1, 0, 6, 9, 8, 6, 2, 0), # 48
(3, 12, 9, 2, 2, 0, 3, 11, 8, 3, 0, 0), # 49
(5, 13, 6, 5, 1, 0, 5, 7, 2, 3, 5, 0), # 50
(4, 9, 7, 5, 2, 0, 4, 9, 6, 5, 4, 0), # 51
(3, 8, 4, 3, 3, 0, 4, 7, 6, 3, 8, 0), # 52
(6, 10, 8, 6, 2, 0, 3, 6, 8, 4, 2, 0), # 53
(0, 11, 11, 5, 4, 0, 8, 8, 6, 5, 8, 0), # 54
(1, 9, 4, 3, 2, 0, 5, 8, 3, 5, 2, 0), # 55
(5, 7, 7, 3, 3, 0, 3, 9, 5, 3, 2, 0), # 56
(4, 7, 10, 6, 5, 0, 6, 12, 5, 5, 3, 0), # 57
(3, 6, 7, 6, 3, 0, 2, 8, 6, 7, 3, 0), # 58
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # 59
)
station_arriving_intensity = (
(3.7095121817383676, 9.515044981060607, 11.19193043059126, 8.87078804347826, 10.000240384615385, 6.659510869565219), # 0
(3.7443308140669203, 9.620858238197952, 11.252381752534994, 8.920190141908213, 10.075193108974359, 6.657240994867151), # 1
(3.7787518681104277, 9.725101964085297, 11.31139817195087, 8.968504830917876, 10.148564102564103, 6.654901690821256), # 2
(3.8127461259877085, 9.827663671875001, 11.368936576156813, 9.01569089673913, 10.22028605769231, 6.652493274456523), # 3
(3.8462843698175795, 9.928430874719417, 11.424953852470724, 9.061707125603865, 10.290291666666668, 6.6500160628019325), # 4
(3.879337381718857, 10.027291085770905, 11.479406888210512, 9.106512303743962, 10.358513621794872, 6.647470372886473), # 5
(3.9118759438103607, 10.12413181818182, 11.53225257069409, 9.150065217391306, 10.424884615384617, 6.644856521739131), # 6
(3.943870838210907, 10.218840585104518, 11.58344778723936, 9.19232465277778, 10.489337339743592, 6.64217482638889), # 7
(3.975292847039314, 10.311304899691358, 11.632949425164242, 9.233249396135266, 10.551804487179488, 6.639425603864735), # 8
(4.006112752414399, 10.401412275094698, 11.680714371786634, 9.272798233695653, 10.61221875, 6.636609171195653), # 9
(4.03630133645498, 10.489050224466892, 11.72669951442445, 9.310929951690824, 10.670512820512823, 6.633725845410628), # 10
(4.065829381279876, 10.5741062609603, 11.7708617403956, 9.347603336352659, 10.726619391025642, 6.630775943538648), # 11
(4.094667669007903, 10.656467897727273, 11.813157937017996, 9.382777173913043, 10.780471153846154, 6.627759782608695), # 12
(4.122786981757876, 10.736022647920176, 11.85354499160954, 9.416410250603866, 10.832000801282053, 6.624677679649759), # 13
(4.15015810164862, 10.81265802469136, 11.891979791488144, 9.448461352657004, 10.881141025641025, 6.621529951690821), # 14
(4.1767518107989465, 10.886261541193182, 11.928419223971721, 9.478889266304348, 10.92782451923077, 6.618316915760871), # 15
(4.202538891327675, 10.956720710578002, 11.96282017637818, 9.507652777777778, 10.971983974358976, 6.61503888888889), # 16
(4.227490125353625, 11.023923045998176, 11.995139536025421, 9.53471067330918, 11.013552083333336, 6.611696188103866), # 17
(4.25157629499561, 11.087756060606061, 12.025334190231364, 9.560021739130436, 11.052461538461543, 6.608289130434783), # 18
(4.274768182372451, 11.148107267554012, 12.053361026313912, 9.58354476147343, 11.088645032051284, 6.604818032910629), # 19
(4.297036569602966, 11.204864179994388, 12.079176931590974, 9.60523852657005, 11.122035256410259, 6.601283212560387), # 20
(4.318352238805971, 11.257914311079544, 12.102738793380466, 9.625061820652174, 11.152564903846153, 6.597684986413044), # 21
(4.338685972100283, 11.307145173961842, 12.124003499000287, 9.642973429951692, 11.180166666666667, 6.5940236714975855), # 22
(4.358008551604722, 11.352444281793632, 12.142927935768354, 9.658932140700484, 11.204773237179488, 6.590299584842997), # 23
(4.3762907594381035, 11.393699147727272, 12.159468991002571, 9.672896739130437, 11.226317307692307, 6.586513043478261), # 24
(4.393503377719247, 11.430797284915124, 12.173583552020853, 9.684826011473431, 11.244731570512819, 6.582664364432368), # 25
(4.409617188566969, 11.46362620650954, 12.185228506141103, 9.694678743961353, 11.259948717948719, 6.5787538647343), # 26
(4.424602974100088, 11.492073425662877, 12.194360740681233, 9.702413722826089, 11.271901442307694, 6.574781861413045), # 27
(4.438431516437421, 11.516026455527497, 12.200937142959157, 9.707989734299519, 11.280522435897437, 6.570748671497586), # 28
(4.4510735976977855, 11.535372809255753, 12.204914600292774, 9.711365564613528, 11.285744391025641, 6.566654612016909), # 29
(4.4625, 11.55, 12.20625, 9.7125, 11.287500000000001, 6.562500000000001), # 30
(4.47319183983376, 11.56215031960227, 12.205248928140096, 9.712295118464054, 11.286861125886526, 6.556726763701484), # 31
(4.4836528452685425, 11.574140056818184, 12.202274033816424, 9.711684477124184, 11.28495815602837, 6.547834661835751), # 32
(4.493887715792838, 11.585967720170455, 12.197367798913046, 9.710674080882354, 11.281811569148937, 6.535910757121439), # 33
(4.503901150895141, 11.597631818181819, 12.19057270531401, 9.709269934640524, 11.277441843971632, 6.521042112277196), # 34
(4.513697850063939, 11.609130859374998, 12.181931234903383, 9.707478043300654, 11.27186945921986, 6.503315790021656), # 35
(4.523282512787724, 11.62046335227273, 12.171485869565219, 9.705304411764708, 11.265114893617023, 6.482818853073463), # 36
(4.532659838554988, 11.631627805397729, 12.159279091183576, 9.70275504493464, 11.257198625886524, 6.4596383641512585), # 37
(4.5418345268542195, 11.642622727272729, 12.145353381642513, 9.699835947712419, 11.248141134751775, 6.433861385973679), # 38
(4.5508112771739135, 11.653446626420456, 12.129751222826087, 9.696553125000001, 11.23796289893617, 6.40557498125937), # 39
(4.559594789002558, 11.664098011363638, 12.11251509661836, 9.692912581699348, 11.22668439716312, 6.37486621272697), # 40
(4.568189761828645, 11.674575390625, 12.093687484903382, 9.68892032271242, 11.214326108156028, 6.34182214309512), # 41
(4.576600895140665, 11.684877272727276, 12.07331086956522, 9.684582352941177, 11.2009085106383, 6.3065298350824595), # 42
(4.584832888427111, 11.69500216619318, 12.051427732487923, 9.679904677287583, 11.186452083333334, 6.26907635140763), # 43
(4.592890441176471, 11.704948579545455, 12.028080555555556, 9.674893300653595, 11.17097730496454, 6.229548754789272), # 44
(4.600778252877237, 11.714715021306818, 12.003311820652177, 9.669554227941177, 11.15450465425532, 6.188034107946028), # 45
(4.6085010230179035, 11.724300000000003, 11.97716400966184, 9.663893464052288, 11.137054609929079, 6.144619473596536), # 46
(4.616063451086957, 11.733702024147728, 11.9496796044686, 9.65791701388889, 11.118647650709221, 6.099391914459438), # 47
(4.623470236572891, 11.742919602272728, 11.920901086956523, 9.651630882352942, 11.099304255319149, 6.052438493253375), # 48
(4.630726078964194, 11.751951242897727, 11.890870939009663, 9.645041074346407, 11.079044902482272, 6.003846272696985), # 49
(4.6378356777493615, 11.760795454545454, 11.85963164251208, 9.638153594771243, 11.057890070921987, 5.953702315508913), # 50
(4.6448037324168805, 11.769450745738636, 11.827225679347826, 9.630974448529413, 11.035860239361703, 5.902093684407797), # 51
(4.651634942455243, 11.777915625, 11.793695531400965, 9.623509640522876, 11.012975886524824, 5.849107442112278), # 52
(4.658334007352941, 11.786188600852274, 11.759083680555555, 9.615765175653596, 10.989257491134753, 5.794830651340996), # 53
(4.6649056265984665, 11.79426818181818, 11.723432608695653, 9.60774705882353, 10.964725531914894, 5.739350374812594), # 54
(4.671354499680307, 11.802152876420456, 11.686784797705313, 9.599461294934642, 10.939400487588653, 5.682753675245711), # 55
(4.677685326086957, 11.809841193181818, 11.649182729468599, 9.59091388888889, 10.913302836879433, 5.625127615358988), # 56
(4.683902805306906, 11.817331640625003, 11.610668885869565, 9.582110845588236, 10.886453058510638, 5.566559257871065), # 57
(4.690011636828645, 11.824622727272727, 11.57128574879227, 9.573058169934642, 10.858871631205675, 5.507135665500583), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
passenger_arriving_acc = (
(5, 8, 7, 5, 1, 0, 3, 5, 8, 3, 0, 0), # 0
(7, 16, 20, 10, 1, 0, 15, 15, 14, 4, 0, 0), # 1
(8, 26, 25, 14, 3, 0, 28, 28, 19, 6, 3, 0), # 2
(13, 37, 41, 16, 6, 0, 36, 28, 25, 9, 7, 0), # 3
(18, 45, 47, 18, 7, 0, 43, 38, 31, 14, 8, 0), # 4
(19, 55, 55, 23, 8, 0, 46, 48, 35, 16, 10, 0), # 5
(25, 63, 59, 28, 10, 0, 52, 54, 39, 20, 12, 0), # 6
(30, 72, 64, 30, 11, 0, 62, 66, 41, 25, 15, 0), # 7
(36, 75, 69, 33, 12, 0, 70, 73, 44, 31, 15, 0), # 8
(41, 82, 74, 38, 14, 0, 76, 79, 49, 35, 17, 0), # 9
(46, 93, 82, 41, 14, 0, 81, 87, 54, 39, 18, 0), # 10
(53, 98, 91, 47, 17, 0, 85, 92, 65, 45, 20, 0), # 11
(59, 105, 102, 49, 18, 0, 92, 98, 79, 53, 21, 0), # 12
(62, 111, 112, 53, 20, 0, 102, 107, 88, 55, 24, 0), # 13
(64, 119, 115, 57, 24, 0, 108, 119, 89, 57, 26, 0), # 14
(68, 131, 125, 64, 25, 0, 113, 135, 98, 64, 27, 0), # 15
(70, 142, 130, 65, 26, 0, 121, 144, 103, 70, 27, 0), # 16
(70, 152, 139, 69, 31, 0, 125, 152, 106, 74, 28, 0), # 17
(72, 159, 145, 72, 34, 0, 130, 164, 113, 80, 33, 0), # 18
(77, 167, 149, 77, 35, 0, 141, 173, 117, 83, 35, 0), # 19
(77, 176, 153, 79, 36, 0, 150, 186, 122, 86, 37, 0), # 20
(79, 184, 160, 81, 39, 0, 153, 197, 131, 88, 41, 0), # 21
(83, 195, 168, 86, 40, 0, 160, 205, 135, 94, 44, 0), # 22
(86, 199, 174, 86, 43, 0, 164, 216, 139, 101, 44, 0), # 23
(92, 213, 183, 89, 44, 0, 169, 221, 148, 107, 49, 0), # 24
(96, 221, 187, 96, 49, 0, 177, 227, 155, 110, 51, 0), # 25
(100, 229, 201, 98, 56, 0, 185, 236, 164, 114, 56, 0), # 26
(103, 234, 210, 106, 57, 0, 195, 242, 171, 119, 59, 0), # 27
(108, 247, 215, 109, 60, 0, 199, 252, 181, 123, 60, 0), # 28
(113, 255, 222, 112, 65, 0, 209, 262, 184, 123, 60, 0), # 29
(114, 265, 225, 115, 69, 0, 219, 272, 187, 130, 63, 0), # 30
(122, 271, 235, 120, 69, 0, 225, 281, 191, 134, 64, 0), # 31
(125, 281, 244, 122, 70, 0, 233, 291, 194, 139, 66, 0), # 32
(129, 290, 249, 123, 73, 0, 236, 298, 200, 145, 68, 0), # 33
(136, 296, 254, 129, 77, 0, 246, 307, 208, 147, 69, 0), # 34
(142, 302, 260, 134, 79, 0, 250, 312, 213, 155, 71, 0), # 35
(145, 317, 267, 138, 83, 0, 257, 320, 223, 160, 75, 0), # 36
(149, 332, 278, 142, 85, 0, 266, 331, 226, 169, 79, 0), # 37
(152, 345, 283, 145, 89, 0, 274, 339, 229, 175, 82, 0), # 38
(154, 350, 291, 146, 89, 0, 279, 346, 232, 183, 87, 0), # 39
(160, 358, 302, 151, 92, 0, 289, 359, 236, 187, 88, 0), # 40
(164, 367, 308, 157, 92, 0, 295, 368, 239, 188, 88, 0), # 41
(166, 377, 316, 160, 96, 0, 305, 375, 244, 189, 88, 0), # 42
(170, 394, 323, 163, 99, 0, 312, 381, 250, 192, 89, 0), # 43
(176, 407, 326, 165, 101, 0, 317, 392, 257, 197, 93, 0), # 44
(179, 418, 332, 167, 103, 0, 320, 397, 262, 200, 95, 0), # 45
(186, 426, 341, 171, 103, 0, 330, 405, 268, 205, 98, 0), # 46
(189, 436, 351, 179, 105, 0, 336, 415, 275, 212, 101, 0), # 47
(193, 446, 356, 187, 106, 0, 342, 424, 283, 218, 103, 0), # 48
(196, 458, 365, 189, 108, 0, 345, 435, 291, 221, 103, 0), # 49
(201, 471, 371, 194, 109, 0, 350, 442, 293, 224, 108, 0), # 50
(205, 480, 378, 199, 111, 0, 354, 451, 299, 229, 112, 0), # 51
(208, 488, 382, 202, 114, 0, 358, 458, 305, 232, 120, 0), # 52
(214, 498, 390, 208, 116, 0, 361, 464, 313, 236, 122, 0), # 53
(214, 509, 401, 213, 120, 0, 369, 472, 319, 241, 130, 0), # 54
(215, 518, 405, 216, 122, 0, 374, 480, 322, 246, 132, 0), # 55
(220, 525, 412, 219, 125, 0, 377, 489, 327, 249, 134, 0), # 56
(224, 532, 422, 225, 130, 0, 383, 501, 332, 254, 137, 0), # 57
(227, 538, 429, 231, 133, 0, 385, 509, 338, 261, 140, 0), # 58
(227, 538, 429, 231, 133, 0, 385, 509, 338, 261, 140, 0), # 59
)
passenger_arriving_rate = (
(3.7095121817383676, 7.612035984848484, 6.715158258354756, 3.5483152173913037, 2.000048076923077, 0.0, 6.659510869565219, 8.000192307692307, 5.322472826086956, 4.476772172236504, 1.903008996212121, 0.0), # 0
(3.7443308140669203, 7.696686590558361, 6.751429051520996, 3.5680760567632848, 2.0150386217948717, 0.0, 6.657240994867151, 8.060154487179487, 5.352114085144928, 4.500952701013997, 1.9241716476395903, 0.0), # 1
(3.7787518681104277, 7.780081571268237, 6.786838903170522, 3.58740193236715, 2.0297128205128203, 0.0, 6.654901690821256, 8.118851282051281, 5.381102898550726, 4.524559268780347, 1.9450203928170593, 0.0), # 2
(3.8127461259877085, 7.8621309375, 6.821361945694087, 3.6062763586956517, 2.044057211538462, 0.0, 6.652493274456523, 8.176228846153847, 5.409414538043478, 4.547574630462725, 1.965532734375, 0.0), # 3
(3.8462843698175795, 7.942744699775533, 6.854972311482434, 3.624682850241546, 2.0580583333333333, 0.0, 6.6500160628019325, 8.232233333333333, 5.437024275362319, 4.569981540988289, 1.9856861749438832, 0.0), # 4
(3.879337381718857, 8.021832868616723, 6.887644132926307, 3.6426049214975844, 2.0717027243589743, 0.0, 6.647470372886473, 8.286810897435897, 5.463907382246377, 4.591762755284204, 2.005458217154181, 0.0), # 5
(3.9118759438103607, 8.099305454545455, 6.919351542416455, 3.660026086956522, 2.084976923076923, 0.0, 6.644856521739131, 8.339907692307692, 5.490039130434783, 4.612901028277636, 2.0248263636363637, 0.0), # 6
(3.943870838210907, 8.175072468083613, 6.950068672343615, 3.6769298611111116, 2.0978674679487184, 0.0, 6.64217482638889, 8.391469871794873, 5.515394791666668, 4.633379114895743, 2.043768117020903, 0.0), # 7
(3.975292847039314, 8.249043919753085, 6.979769655098544, 3.693299758454106, 2.1103608974358976, 0.0, 6.639425603864735, 8.44144358974359, 5.5399496376811594, 4.653179770065696, 2.062260979938271, 0.0), # 8
(4.006112752414399, 8.321129820075758, 7.00842862307198, 3.709119293478261, 2.12244375, 0.0, 6.636609171195653, 8.489775, 5.563678940217391, 4.672285748714653, 2.0802824550189394, 0.0), # 9
(4.03630133645498, 8.391240179573513, 7.03601970865467, 3.724371980676329, 2.134102564102564, 0.0, 6.633725845410628, 8.536410256410257, 5.586557971014494, 4.690679805769779, 2.0978100448933783, 0.0), # 10
(4.065829381279876, 8.459285008768239, 7.06251704423736, 3.739041334541063, 2.145323878205128, 0.0, 6.630775943538648, 8.581295512820512, 5.608562001811595, 4.70834469615824, 2.1148212521920597, 0.0), # 11
(4.094667669007903, 8.525174318181818, 7.087894762210797, 3.7531108695652167, 2.156094230769231, 0.0, 6.627759782608695, 8.624376923076923, 5.6296663043478254, 4.725263174807198, 2.1312935795454546, 0.0), # 12
(4.122786981757876, 8.58881811833614, 7.112126994965724, 3.766564100241546, 2.1664001602564102, 0.0, 6.624677679649759, 8.665600641025641, 5.649846150362319, 4.741417996643816, 2.147204529584035, 0.0), # 13
(4.15015810164862, 8.650126419753088, 7.135187874892886, 3.779384541062801, 2.1762282051282047, 0.0, 6.621529951690821, 8.704912820512819, 5.669076811594202, 4.756791916595257, 2.162531604938272, 0.0), # 14
(4.1767518107989465, 8.709009232954545, 7.157051534383032, 3.7915557065217387, 2.1855649038461538, 0.0, 6.618316915760871, 8.742259615384615, 5.6873335597826085, 4.771367689588688, 2.177252308238636, 0.0), # 15
(4.202538891327675, 8.7653765684624, 7.177692105826908, 3.803061111111111, 2.194396794871795, 0.0, 6.61503888888889, 8.77758717948718, 5.7045916666666665, 4.785128070551272, 2.1913441421156, 0.0), # 16
(4.227490125353625, 8.81913843679854, 7.197083721615253, 3.8138842693236716, 2.202710416666667, 0.0, 6.611696188103866, 8.810841666666668, 5.720826403985508, 4.798055814410168, 2.204784609199635, 0.0), # 17
(4.25157629499561, 8.870204848484848, 7.215200514138818, 3.824008695652174, 2.2104923076923084, 0.0, 6.608289130434783, 8.841969230769234, 5.736013043478262, 4.810133676092545, 2.217551212121212, 0.0), # 18
(4.274768182372451, 8.918485814043208, 7.232016615788346, 3.8334179045893717, 2.2177290064102566, 0.0, 6.604818032910629, 8.870916025641026, 5.750126856884058, 4.8213444105255645, 2.229621453510802, 0.0), # 19
(4.297036569602966, 8.96389134399551, 7.247506158954584, 3.8420954106280196, 2.2244070512820517, 0.0, 6.601283212560387, 8.897628205128207, 5.76314311594203, 4.831670772636389, 2.2409728359988774, 0.0), # 20
(4.318352238805971, 9.006331448863634, 7.261643276028279, 3.8500247282608693, 2.2305129807692303, 0.0, 6.597684986413044, 8.922051923076921, 5.775037092391305, 4.841095517352186, 2.2515828622159084, 0.0), # 21
(4.338685972100283, 9.045716139169473, 7.274402099400172, 3.8571893719806765, 2.2360333333333333, 0.0, 6.5940236714975855, 8.944133333333333, 5.785784057971015, 4.849601399600115, 2.2614290347923682, 0.0), # 22
(4.358008551604722, 9.081955425434906, 7.285756761461012, 3.8635728562801934, 2.2409546474358972, 0.0, 6.590299584842997, 8.963818589743589, 5.79535928442029, 4.857171174307341, 2.2704888563587264, 0.0), # 23
(4.3762907594381035, 9.114959318181818, 7.295681394601543, 3.869158695652174, 2.2452634615384612, 0.0, 6.586513043478261, 8.981053846153845, 5.803738043478262, 4.863787596401028, 2.2787398295454544, 0.0), # 24
(4.393503377719247, 9.1446378279321, 7.304150131212511, 3.8739304045893723, 2.2489463141025636, 0.0, 6.582664364432368, 8.995785256410255, 5.810895606884059, 4.869433420808341, 2.286159456983025, 0.0), # 25
(4.409617188566969, 9.17090096520763, 7.311137103684661, 3.8778714975845405, 2.2519897435897436, 0.0, 6.5787538647343, 9.007958974358974, 5.816807246376811, 4.874091402456441, 2.2927252413019077, 0.0), # 26
(4.424602974100088, 9.193658740530301, 7.31661644440874, 3.880965489130435, 2.2543802884615385, 0.0, 6.574781861413045, 9.017521153846154, 5.821448233695653, 4.877744296272493, 2.2984146851325753, 0.0), # 27
(4.438431516437421, 9.212821164421996, 7.320562285775494, 3.8831958937198072, 2.256104487179487, 0.0, 6.570748671497586, 9.024417948717948, 5.824793840579711, 4.8803748571836625, 2.303205291105499, 0.0), # 28
(4.4510735976977855, 9.228298247404602, 7.322948760175664, 3.884546225845411, 2.257148878205128, 0.0, 6.566654612016909, 9.028595512820512, 5.826819338768117, 4.881965840117109, 2.3070745618511506, 0.0), # 29
(4.4625, 9.24, 7.32375, 3.885, 2.2575000000000003, 0.0, 6.562500000000001, 9.030000000000001, 5.8275, 4.8825, 2.31, 0.0), # 30
(4.47319183983376, 9.249720255681815, 7.323149356884057, 3.884918047385621, 2.257372225177305, 0.0, 6.556726763701484, 9.02948890070922, 5.827377071078432, 4.882099571256038, 2.312430063920454, 0.0), # 31
(4.4836528452685425, 9.259312045454546, 7.3213644202898545, 3.884673790849673, 2.2569916312056737, 0.0, 6.547834661835751, 9.027966524822695, 5.82701068627451, 4.880909613526569, 2.3148280113636366, 0.0), # 32
(4.493887715792838, 9.268774176136363, 7.3184206793478275, 3.8842696323529413, 2.2563623138297872, 0.0, 6.535910757121439, 9.025449255319149, 5.826404448529412, 4.878947119565218, 2.3171935440340907, 0.0), # 33
(4.503901150895141, 9.278105454545454, 7.314343623188405, 3.8837079738562093, 2.2554883687943263, 0.0, 6.521042112277196, 9.021953475177305, 5.825561960784314, 4.876229082125604, 2.3195263636363634, 0.0), # 34
(4.513697850063939, 9.287304687499997, 7.3091587409420296, 3.882991217320261, 2.2543738918439717, 0.0, 6.503315790021656, 9.017495567375887, 5.824486825980392, 4.872772493961353, 2.3218261718749993, 0.0), # 35
(4.523282512787724, 9.296370681818182, 7.302891521739131, 3.8821217647058828, 2.253022978723404, 0.0, 6.482818853073463, 9.012091914893617, 5.823182647058824, 4.868594347826087, 2.3240926704545455, 0.0), # 36
(4.532659838554988, 9.305302244318183, 7.295567454710145, 3.881102017973856, 2.2514397251773044, 0.0, 6.4596383641512585, 9.005758900709218, 5.821653026960784, 4.86371163647343, 2.3263255610795457, 0.0), # 37
(4.5418345268542195, 9.314098181818181, 7.287212028985508, 3.8799343790849674, 2.249628226950355, 0.0, 6.433861385973679, 8.99851290780142, 5.819901568627452, 4.858141352657005, 2.3285245454545453, 0.0), # 38
(4.5508112771739135, 9.322757301136363, 7.277850733695652, 3.87862125, 2.247592579787234, 0.0, 6.40557498125937, 8.990370319148935, 5.817931875, 4.8519004891304345, 2.330689325284091, 0.0), # 39
(4.559594789002558, 9.33127840909091, 7.267509057971015, 3.8771650326797387, 2.245336879432624, 0.0, 6.37486621272697, 8.981347517730496, 5.815747549019608, 4.845006038647344, 2.3328196022727274, 0.0), # 40
(4.568189761828645, 9.3396603125, 7.256212490942029, 3.8755681290849675, 2.2428652216312055, 0.0, 6.34182214309512, 8.971460886524822, 5.813352193627452, 4.837474993961353, 2.334915078125, 0.0), # 41
(4.576600895140665, 9.34790181818182, 7.2439865217391315, 3.8738329411764707, 2.2401817021276598, 0.0, 6.3065298350824595, 8.960726808510639, 5.810749411764706, 4.829324347826088, 2.336975454545455, 0.0), # 42
(4.584832888427111, 9.356001732954544, 7.230856639492753, 3.8719618709150327, 2.2372904166666667, 0.0, 6.26907635140763, 8.949161666666667, 5.80794280637255, 4.820571092995169, 2.339000433238636, 0.0), # 43
(4.592890441176471, 9.363958863636363, 7.216848333333333, 3.8699573202614377, 2.2341954609929076, 0.0, 6.229548754789272, 8.93678184397163, 5.804935980392157, 4.811232222222222, 2.3409897159090907, 0.0), # 44
(4.600778252877237, 9.371772017045453, 7.201987092391306, 3.8678216911764705, 2.230900930851064, 0.0, 6.188034107946028, 8.923603723404256, 5.801732536764706, 4.80132472826087, 2.3429430042613633, 0.0), # 45
(4.6085010230179035, 9.379440000000002, 7.186298405797103, 3.8655573856209147, 2.2274109219858156, 0.0, 6.144619473596536, 8.909643687943262, 5.798336078431372, 4.790865603864735, 2.3448600000000006, 0.0), # 46
(4.616063451086957, 9.386961619318182, 7.16980776268116, 3.8631668055555552, 2.223729530141844, 0.0, 6.099391914459438, 8.894918120567375, 5.794750208333333, 4.77987184178744, 2.3467404048295455, 0.0), # 47
(4.623470236572891, 9.394335681818182, 7.152540652173913, 3.8606523529411763, 2.21986085106383, 0.0, 6.052438493253375, 8.87944340425532, 5.790978529411765, 4.7683604347826085, 2.3485839204545456, 0.0), # 48
(4.630726078964194, 9.401560994318181, 7.134522563405797, 3.8580164297385626, 2.2158089804964543, 0.0, 6.003846272696985, 8.863235921985817, 5.787024644607844, 4.7563483756038645, 2.3503902485795454, 0.0), # 49
(4.6378356777493615, 9.408636363636361, 7.115778985507247, 3.8552614379084966, 2.211578014184397, 0.0, 5.953702315508913, 8.846312056737588, 5.782892156862745, 4.743852657004831, 2.3521590909090904, 0.0), # 50
(4.6448037324168805, 9.415560596590907, 7.096335407608696, 3.852389779411765, 2.2071720478723407, 0.0, 5.902093684407797, 8.828688191489363, 5.778584669117648, 4.73089027173913, 2.353890149147727, 0.0), # 51
(4.651634942455243, 9.4223325, 7.0762173188405795, 3.84940385620915, 2.2025951773049646, 0.0, 5.849107442112278, 8.810380709219858, 5.774105784313726, 4.717478212560386, 2.355583125, 0.0), # 52
(4.658334007352941, 9.428950880681818, 7.055450208333333, 3.8463060702614382, 2.1978514982269504, 0.0, 5.794830651340996, 8.791405992907801, 5.769459105392158, 4.703633472222222, 2.3572377201704544, 0.0), # 53
(4.6649056265984665, 9.435414545454544, 7.034059565217391, 3.843098823529412, 2.192945106382979, 0.0, 5.739350374812594, 8.771780425531915, 5.764648235294119, 4.689373043478261, 2.358853636363636, 0.0), # 54
(4.671354499680307, 9.441722301136364, 7.012070878623187, 3.8397845179738566, 2.1878800975177306, 0.0, 5.682753675245711, 8.751520390070922, 5.759676776960785, 4.674713919082125, 2.360430575284091, 0.0), # 55
(4.677685326086957, 9.447872954545453, 6.989509637681159, 3.8363655555555556, 2.1826605673758865, 0.0, 5.625127615358988, 8.730642269503546, 5.754548333333334, 4.65967309178744, 2.361968238636363, 0.0), # 56
(4.683902805306906, 9.453865312500001, 6.966401331521738, 3.832844338235294, 2.1772906117021273, 0.0, 5.566559257871065, 8.70916244680851, 5.749266507352941, 4.644267554347826, 2.3634663281250003, 0.0), # 57
(4.690011636828645, 9.459698181818181, 6.942771449275362, 3.8292232679738563, 2.1717743262411346, 0.0, 5.507135665500583, 8.687097304964539, 5.743834901960785, 4.628514299516908, 2.3649245454545453, 0.0), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
# Per-index alighting-probability table: one row per index 0-59, twelve
# columns per row.  Every row is identical -- a uniform 1/6 probability in
# the passenger columns, with fixed 0/1 sentinel columns.
# NOTE(review): "allighting" looks like a typo for "alighting", but the name
# is a lookup key -- renaming would break callers; confirm before changing.
# NOTE(review): this table appears machine-generated; regenerate it rather
# than editing rows by hand.
passenger_allighting_rate = (
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 0
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 1
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 2
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 3
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 4
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 5
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 6
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 7
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 8
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 9
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 10
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 11
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 12
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 13
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 14
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 15
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 16
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 17
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 18
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 19
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 20
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 21
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 22
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 23
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 24
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 25
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 26
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 27
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 28
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 29
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 30
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 31
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 32
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 33
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 34
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 35
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 36
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 37
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 38
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 39
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 40
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 41
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 42
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 43
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 44
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 45
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 46
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 47
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 48
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 49
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 50
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 51
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 52
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 53
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 54
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 55
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 56
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 57
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 58
    (0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 59
)
"""
parameters for reproducibiliy. More information: https://numpy.org/doc/stable/reference/random/parallel.html
"""
# Initial entropy used to build the root random seed sequence.
entropy = 258194110137029475889902652135037600173
# Indices of the seed-sequence children consumed by each run; see the
# numpy parallel random generation docs linked in the string above.
child_seed_index = (
    1, # 0
    17, # 1
)
| 113.101493
| 212
| 0.729103
| 5,147
| 37,889
| 5.365067
| 0.2279
| 0.312885
| 0.2477
| 0.469327
| 0.328964
| 0.327805
| 0.327805
| 0.327805
| 0.327805
| 0.327805
| 0
| 0.819026
| 0.119137
| 37,889
| 334
| 213
| 113.44012
| 0.00836
| 0.031962
| 0
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.015823
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
919547dc485ae55963f85a55d6a4a96bdb1a4fa1
| 7,444
|
py
|
Python
|
tests.py
|
bioinformatics-ua/redis-rw-lock
|
ddff802320b484419805529fed1e7262352fb39e
|
[
"MIT"
] | 7
|
2017-09-14T09:22:22.000Z
|
2021-03-15T15:43:06.000Z
|
tests.py
|
bioinformatics-ua/redis-rw-lock
|
ddff802320b484419805529fed1e7262352fb39e
|
[
"MIT"
] | null | null | null |
tests.py
|
bioinformatics-ua/redis-rw-lock
|
ddff802320b484419805529fed1e7262352fb39e
|
[
"MIT"
] | 2
|
2020-07-31T13:27:15.000Z
|
2020-09-24T10:03:42.000Z
|
# Author: Swapnil Mahajan
import unittest
import redis
import threading
import time
import copy
from redis_rw_lock import RWLock
class Writer(threading.Thread):
    """Thread that appends one value to a shared buffer under a lock.

    Public attributes set by run():
      entry_time -- wall-clock time right after the lock was acquired
      exit_time  -- wall-clock time right before the lock was released
    """

    def __init__(self, buffer_, rw_lock, init_sleep_time, sleep_time, to_write):
        """
        @param buffer_: common buffer_ shared by the readers and writers
        @type buffer_: list
        @type rw_lock: L{RWLock}
        @param init_sleep_time: sleep time before doing any action
        @type init_sleep_time: C{float}
        @param sleep_time: sleep time while in critical section
        @type sleep_time: C{float}
        @param to_write: data that will be appended to the buffer
        """
        super(Writer, self).__init__()
        self.__shared_buffer = buffer_
        self.__lock = rw_lock
        self.__initial_delay = init_sleep_time
        self.__hold_time = sleep_time
        self.__payload = to_write
        # Time of entry to the critical section.
        self.entry_time = None
        # Time of exit from the critical section.
        self.exit_time = None

    def run(self):
        # Stagger the start, then append to the buffer inside the lock,
        # recording when the critical section was entered and left.
        time.sleep(self.__initial_delay)
        self.__lock.acquire()
        self.entry_time = time.time()
        time.sleep(self.__hold_time)
        self.__shared_buffer.append(self.__payload)
        self.exit_time = time.time()
        self.__lock.release()
class Reader(threading.Thread):
    """Thread that deep-copies a shared buffer while holding a lock.

    Public attributes set by run():
      buffer_read -- deep copy of the buffer taken in the critical section
      entry_time  -- wall-clock time right after the lock was acquired
      exit_time   -- wall-clock time right before the lock was released
    """

    def __init__(self, buffer_, rw_lock, init_sleep_time, sleep_time):
        """
        @param buffer_: common buffer shared by the readers and writers
        @type buffer_: list
        @type rw_lock: L{RWLock}
        @param init_sleep_time: sleep time before doing any action
        @type init_sleep_time: C{float}
        @param sleep_time: sleep time while in critical section
        @type sleep_time: C{float}
        """
        super(Reader, self).__init__()
        self.__shared_buffer = buffer_
        self.__lock = rw_lock
        self.__initial_delay = init_sleep_time
        self.__hold_time = sleep_time
        # Copy of the buffer read while in the critical section.
        self.buffer_read = None
        # Time of entry to the critical section.
        self.entry_time = None
        # Time of exit from the critical section.
        self.exit_time = None

    def run(self):
        # Stagger the start, then snapshot the buffer inside the lock,
        # recording when the critical section was entered and left.
        time.sleep(self.__initial_delay)
        self.__lock.acquire()
        self.entry_time = time.time()
        time.sleep(self.__hold_time)
        self.buffer_read = copy.deepcopy(self.__shared_buffer)
        self.exit_time = time.time()
        self.__lock.release()
class RWLockTestCase(unittest.TestCase):
    """Timing-based tests for the Redis-backed reader/writer lock.

    Each test launches Reader/Writer threads with staggered start delays and
    critical-section hold times, then checks (a) which buffer state each
    reader observed and (b) the relative ordering of the recorded
    entry/exit timestamps.

    NOTE(review): these tests rely on wall-clock sleeps and on a reachable
    Redis server at redis.StrictRedis() defaults -- confirm both before
    treating a failure as a lock bug.
    """

    def test_readers_nonexclusive_access(self):
        # Two readers must be able to occupy the critical section at once.
        (buffer_, threads) = self.__init_variables()
        threads.append(Reader(buffer_, self.__generate_reader_lock(), 0, 1))
        threads.append(Writer(buffer_, self.__generate_writer_lock(), 0.4, 1, 1))
        threads.append(Reader(buffer_, self.__generate_reader_lock(), 1, 1))
        threads.append(Reader(buffer_, self.__generate_reader_lock(), 1.2, 0.2))
        self.__start_and_join_threads(threads)
        # The third reader should enter after the second one but it should
        # exit before the second one exits
        # (i.e. the readers should be in the critical section
        # at the same time)
        self.assertEqual([], threads[0].buffer_read)
        self.assertEqual([1], threads[2].buffer_read)
        self.assertEqual([1], threads[3].buffer_read)
        self.assertTrue(threads[1].exit_time <= threads[2].entry_time)
        self.assertTrue(threads[2].entry_time <= threads[3].entry_time)
        self.assertTrue(threads[3].exit_time < threads[2].exit_time)

    def test_writers_exclusive_access(self):
        # Writers must serialize: the second waits for the first to finish.
        (buffer_, threads) = self.__init_variables()
        threads.append(Writer(buffer_, self.__generate_writer_lock(), 0, 0.4, 1))
        threads.append(Writer(buffer_, self.__generate_writer_lock(), 0.1, 0, 2))
        threads.append(Reader(buffer_, self.__generate_reader_lock(), 0.2, 0))
        self.__start_and_join_threads(threads)
        # The second writer should wait for the first one to exit
        self.assertEqual([1, 2], threads[2].buffer_read)
        self.assertTrue(threads[0].exit_time <= threads[1].entry_time)
        self.assertTrue(threads[1].exit_time <= threads[2].exit_time)

    def test_writer_priority(self):
        # A waiting writer must be admitted before queued readers.
        (buffer_, threads) = self.__init_variables()
        threads.append(Writer(buffer_, self.__generate_writer_lock(), 0, 0, 1))
        threads.append(Reader(buffer_, self.__generate_reader_lock(), 0.1, 0.4))
        threads.append(Writer(buffer_, self.__generate_writer_lock(), 0.2, 0, 2))
        threads.append(Reader(buffer_, self.__generate_reader_lock(), 0.3, 0))
        threads.append(Reader(buffer_, self.__generate_reader_lock(), 0.3, 0))
        self.__start_and_join_threads(threads)
        # The second writer should go before the second and the third reader
        self.assertEqual([1], threads[1].buffer_read)
        self.assertEqual([1, 2], threads[3].buffer_read)
        self.assertEqual([1, 2], threads[4].buffer_read)
        self.assertTrue(threads[0].exit_time < threads[1].entry_time)
        self.assertTrue(threads[1].exit_time <= threads[2].entry_time)
        self.assertTrue(threads[2].exit_time <= threads[3].entry_time)
        self.assertTrue(threads[2].exit_time <= threads[4].entry_time)

    def test_many_writers_priority(self):
        # Multiple queued writers all take precedence over queued readers.
        (buffer_, threads) = self.__init_variables()
        threads.append(Writer(buffer_, self.__generate_writer_lock(), 0, 0, 1))
        threads.append(Reader(buffer_, self.__generate_reader_lock(), 0.1, 0.6))
        threads.append(Writer(buffer_, self.__generate_writer_lock(), 0.2, 0.1, 2))
        threads.append(Reader(buffer_, self.__generate_reader_lock(), 0.3, 0))
        threads.append(Reader(buffer_, self.__generate_reader_lock(), 0.4, 0))
        threads.append(Writer(buffer_, self.__generate_writer_lock(), 0.5, 0.1, 3))
        self.__start_and_join_threads(threads)
        # The two last writers should go first -- after the first reader and
        # before the second and the third reader
        self.assertEqual([1], threads[1].buffer_read)
        self.assertEqual([1, 2, 3], threads[3].buffer_read)
        self.assertEqual([1, 2, 3], threads[4].buffer_read)
        self.assertTrue(threads[0].exit_time < threads[1].entry_time)
        self.assertTrue(threads[1].exit_time <= threads[2].entry_time)
        self.assertTrue(threads[1].exit_time <= threads[5].entry_time)
        self.assertTrue(threads[2].exit_time <= threads[3].entry_time)
        self.assertTrue(threads[2].exit_time <= threads[4].entry_time)
        self.assertTrue(threads[5].exit_time <= threads[3].entry_time)
        self.assertTrue(threads[5].exit_time <= threads[4].entry_time)

    @staticmethod
    def __init_variables():
        # Fresh shared buffer and thread list for each test.
        buffer_ = []
        threads = []
        return (buffer_, threads)

    @staticmethod
    def __generate_reader_lock(name='RWLock'):
        # Each thread gets its own Redis connection and READ-mode lock handle.
        redis_conn = redis.StrictRedis()
        return RWLock(redis_conn, name, mode=RWLock.READ)

    @staticmethod
    def __generate_writer_lock(name='RWLock'):
        # Each thread gets its own Redis connection and WRITE-mode lock handle.
        redis_conn = redis.StrictRedis()
        return RWLock(redis_conn, name, mode=RWLock.WRITE)

    @staticmethod
    def __start_and_join_threads(threads):
        # Start every thread before joining any, so the staggered delays
        # control the contention order.
        for t in threads:
            t.start()
        for t in threads:
            t.join()
| 40.237838
| 83
| 0.665368
| 1,001
| 7,444
| 4.617383
| 0.108891
| 0.054522
| 0.0701
| 0.059714
| 0.833622
| 0.809823
| 0.793379
| 0.7791
| 0.744483
| 0.70251
| 0
| 0.020894
| 0.222058
| 7,444
| 184
| 84
| 40.456522
| 0.777241
| 0.140516
| 0
| 0.508475
| 0
| 0
| 0.001998
| 0
| 0
| 0
| 0
| 0
| 0.220339
| 1
| 0.101695
| false
| 0
| 0.050847
| 0
| 0.20339
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
91cb1127e3771e7a4def369e850b01851c3918df
| 47
|
py
|
Python
|
builder_engine/custom_components/callbacks.py
|
DiablosWhisper/machine_learning_toolpack
|
3f4b82b549a3d70b95fc7a2c01959cd99d2b88b9
|
[
"Apache-2.0"
] | null | null | null |
builder_engine/custom_components/callbacks.py
|
DiablosWhisper/machine_learning_toolpack
|
3f4b82b549a3d70b95fc7a2c01959cd99d2b88b9
|
[
"Apache-2.0"
] | null | null | null |
builder_engine/custom_components/callbacks.py
|
DiablosWhisper/machine_learning_toolpack
|
3f4b82b549a3d70b95fc7a2c01959cd99d2b88b9
|
[
"Apache-2.0"
] | null | null | null |
from tensorflow.keras.callbacks import Callback
| 47
| 47
| 0.893617
| 6
| 47
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06383
| 47
| 1
| 47
| 47
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
37f746db1ec623189ef822b38dbd6fe48c44eafb
| 7,262
|
py
|
Python
|
taskutils/debouncedtask.py
|
emlynoregan/appenginetaskutils
|
755cc7cbe4b9badfc1d50f8bd7ebea6e1aae50ee
|
[
"Apache-2.0"
] | 12
|
2017-02-23T12:10:47.000Z
|
2019-11-18T19:58:10.000Z
|
taskutils/debouncedtask.py
|
anotherstarburst/appenginetaskutils
|
513ea7e61b17f0671e89bdae5f77f87d8ab51777
|
[
"Apache-2.0"
] | 4
|
2017-05-10T17:53:07.000Z
|
2019-05-12T15:49:57.000Z
|
taskutils/debouncedtask.py
|
anotherstarburst/appenginetaskutils
|
513ea7e61b17f0671e89bdae5f77f87d8ab51777
|
[
"Apache-2.0"
] | 5
|
2017-03-24T19:53:49.000Z
|
2019-02-17T00:07:32.000Z
|
'''
Created on 26Jul.,2017
@author: emlyn
'''
from google.appengine.api import memcache
from datetime import datetime, timedelta
import hashlib
from task import task
import functools
from taskutils.flash import make_flash
from taskutils.util import logdebug
def GenerateStableId(instring):
    """Return a stable 32-character hex id (MD5 digest) for *instring*.

    Bug fix: ``hashlib.md5`` only accepts bytes on Python 3, so passing a
    text string raised TypeError.  Text input is now encoded as UTF-8
    before hashing; bytes input is hashed unchanged, so existing callers
    keep getting identical digests.

    @param instring: text or bytes to derive a stable id from
    @return: hexadecimal MD5 digest of the input
    """
    data = instring.encode("utf-8") if isinstance(instring, str) else instring
    return hashlib.md5(data).hexdigest()
def debouncedtask(f=None, initsec = 0, repeatsec = 10, debouncename = None, **taskkwargs):
    """Decorator: schedule the wrapped function as a task at most once per
    *repeatsec* seconds.

    The next-run ETA is stored in memcache under a key derived from
    *debouncename* (or, by default, from the function plus its call
    arguments via make_flash).  Each invocation compares the stored ETA to
    the current time and only enqueues a new task when no run is already
    pending soon enough; the ETA is updated with a compare-and-set (CAS)
    retry loop so concurrent callers don't double-schedule.

    @param f: function to wrap; None when the decorator is applied with
        keyword arguments only
    @param initsec: never schedule a run closer than this many seconds away
    @param repeatsec: minimum spacing between scheduled runs, in seconds
    @param debouncename: explicit memcache key suffix; defaults to a
        fingerprint of f and its arguments
    @param taskkwargs: forwarded to task(); any caller-supplied
        countdown/eta is overridden by the debounce logic
    """
    if not f:
        # Decorator was applied with keyword arguments only; return a
        # partial that will receive f on the second application.
        return functools.partial(debouncedtask, initsec = initsec, repeatsec = repeatsec, debouncename = debouncename, **taskkwargs)
    @functools.wraps(f)
    def rundebouncedtask(*args, **kwargs):
        logdebug("x enter rundebouncedtask")
        retval = None
        client = memcache.Client()
        cachekey = "dt%s" % (debouncename if debouncename else make_flash(f, args, kwargs))
        logdebug("cachekey: %s" % cachekey)
        tries = 1
        maxtries = 400
        cont = True
        # CAS retry loop: re-read the stored ETA and try to install a new
        # one until the compare-and-set succeeds or maxtries is exhausted.
        while cont and tries <= maxtries:
            logdebug("tries=%s" % tries)
            cont = False
            eta = client.gets(cachekey)  # gets() arms the CAS token for cas() below
            logdebug("eta: %s" % eta)
            now = datetime.utcnow()
            logdebug("now: %s" % now)
            nowplusinit = now + timedelta(seconds=initsec)
            logdebug("nowplusinit: %s" % nowplusinit)
            if not eta or eta < nowplusinit:
                logdebug("A")
                # Work out how far in the future the next run should be.
                if not eta:
                    # we've never run this thing. Just go for it
                    countdown = 0
                elif eta < now:
                    # we've run this thing in the past.
                    elapsedsectd = now - eta
                    elapsedsec = elapsedsectd.total_seconds()
                    if elapsedsec > repeatsec:
                        countdown = 0
                    else:
                        countdown = repeatsec - elapsedsec
                else:
                    # eta is in the future, but too close for initsec. Need to schedule another full repeatsec ahead
                    futuresectd = eta - now
                    futuresec = futuresectd.total_seconds() # number of seconds in the future that we're scheduled to run
                    countdown = futuresec + repeatsec # let's schedule ahead one more repeat after that
                if countdown < initsec:
                    countdown = initsec # don't schedule anything closer than initsec to now.
                logdebug("countdown: %s" % countdown)
                nexteta = now + timedelta(seconds=countdown)
                logdebug("nexteta: %s" % nexteta)
                # add() when no ETA exists yet, cas() to atomically replace
                # one we read above; either way a False result means a
                # concurrent caller got there first.
                if eta is None:
                    casresult = client.add(cachekey, nexteta)
                else:
                    casresult = client.cas(cachekey, nexteta)
                logdebug("CAS result: %s" % casresult)
                if casresult or tries == maxtries:
                    if tries == maxtries:
                        logdebug("We got to maxtries in debounce, something screwy re: memcache. Better just call the function")
                    logdebug("B")
                    # Strip caller-supplied scheduling hints; the debounce
                    # logic owns the countdown.
                    taskkwargscopy = dict(taskkwargs)
                    if "countdown" in taskkwargscopy:
                        del taskkwargscopy["countdown"]
                    if "eta" in taskkwargscopy:
                        del taskkwargscopy["eta"]
                    taskkwargscopy["countdown"] = countdown
                    retval = task(f, **taskkwargscopy)(*args, **kwargs) # if this fails, we'll get an exception back to the caller
                else:
                    # either someone tried to do the same thing, or error. Let's try again
                    cont = True
                    tries += 1
                    # logdebug("About to sleep for %s" % tries)
                    # sleep(tries)
            # else we're already scheduled to run far enough into the future, So, let's just stop
        logdebug("leave rundebouncedtask: cont=%s, tries=%s" % (cont, tries))
        return retval
    return rundebouncedtask
# def debouncedtask(f=None, initsec = 0, repeatsec = 10, debouncename = None, **taskkwargs):
# if not f:
# return functools.partial(debouncedtask, initsec = initsec, repeatsec = repeatsec, debouncename = debouncename, **taskkwargs)
#
# @functools.wraps(f)
# def rundebouncedtask(*args, **kwargs):
# logdebug("enter rundebouncedtask")
# retval = None
# client = memcache.Client()
# cachekey = "dt%s" % (debouncename if debouncename else make_flash(f, args, kwargs))
# logdebug("cachekey: %s" % cachekey)
# eta = client.gets(cachekey)
# logdebug("eta: %s" % eta)
# now = datetime.utcnow()
# logdebug("now: %s" % now)
# nowplusinit = now + timedelta(seconds=initsec)
# logdebug("nowplusinit: %s" % nowplusinit)
# if not eta or eta < nowplusinit:
# logdebug("A")
# if not eta:
# # we've never run this thing. Just go for it
# countdown = 0
# elif eta < now:
# # we've run this thing in the past.
# elapsedsectd = now - eta
# elapsedsec = elapsedsectd.total_seconds()
# if elapsedsec > repeatsec:
# countdown = 0
# else:
# countdown = repeatsec - elapsedsec
# else:
# # eta is in the future, but too close for initsec. Need to schedule another full repeatsec ahead
# futuresectd = eta - now
# futuresec = futuresectd.total_seconds() # number of seconds in the future that we're scheduled to run
# countdown = futuresec + repeatsec # let's schedule ahead one more repeat after that
#
# if countdown < initsec:
# countdown = initsec # don't schedule anything closer than initsec to now.
#
# logdebug("countdown: %s" % countdown)
#
# nexteta = now + timedelta(seconds=countdown)
#
# logdebug("nexteta: %s" % nexteta)
#
# if eta is None:
# casresult = client.add(cachekey, nexteta)
# else:
# casresult = client.cas(cachekey, nexteta)
# logdebug("CAS result: %s" % casresult)
# if casresult:
# logdebug("B")
#
# taskkwargscopy = dict(taskkwargs)
# if "countdown" in taskkwargscopy:
# del taskkwargscopy["countdown"]
# if "eta" in taskkwargscopy:
# del taskkwargscopy["eta"]
# taskkwargscopy["countdown"] = countdown
# retval = task.task(f, **taskkwargscopy)(*args, **kwargs) # if this fails, we'll get an exception back to the caller
# # else someone's already done this. So let's just stop.
# # else we're already scheduled to run far enough into the future, So, let's just stop
# logdebug("leave rundebouncedtask")
# return retval
# return rundebouncedtask
| 43.746988
| 134
| 0.535941
| 714
| 7,262
| 5.441176
| 0.218487
| 0.007722
| 0.018533
| 0.033977
| 0.825225
| 0.821622
| 0.821622
| 0.821622
| 0.821622
| 0.821622
| 0
| 0.004873
| 0.37827
| 7,262
| 165
| 135
| 44.012121
| 0.855592
| 0.501928
| 0
| 0.109589
| 0
| 0
| 0.080329
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041096
| false
| 0
| 0.09589
| 0.013699
| 0.191781
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
53323a1fdbd6f8fdf2a2300529b2174d99559d3d
| 2,262
|
py
|
Python
|
VSR/DataLoader/YVDecoder.py
|
Kadantte/VideoSuperResolution
|
4c86e49d81c7a9bea1fe0780d651afc126768df3
|
[
"MIT"
] | 1,447
|
2018-06-04T08:44:07.000Z
|
2022-03-29T06:19:10.000Z
|
VSR/DataLoader/YVDecoder.py
|
AbdulMoqeet/VideoSuperResolution
|
82c3347554561ff9dfb5e86d9cf0a55239ca662e
|
[
"MIT"
] | 96
|
2018-08-29T01:02:45.000Z
|
2022-01-12T06:00:01.000Z
|
VSR/DataLoader/YVDecoder.py
|
AbdulMoqeet/VideoSuperResolution
|
82c3347554561ff9dfb5e86d9cf0a55239ca662e
|
[
"MIT"
] | 307
|
2018-06-26T13:35:54.000Z
|
2022-01-21T09:01:54.000Z
|
# Copyright (c) 2017-2020 Wenyi Tang.
# Author: Wenyi Tang
# Email: wenyitang@outlook.com
# Update: 2020 - 2 - 7
# Image customized decoder for YV12([Y][U/4][V/4]), YV21([Y][V/4][U/4])
# NOTE: [Y][U][V] means Y/U/V channel is a planar channel, [U/4] means
# U channel is sub-sampled by a factor of [2, 2]
import numpy as np
from PIL import ImageFile
class YV12Decoder(ImageFile.PyDecoder):
  """PIL.Image.DECODERS for YV12 format raw bytes

  Registered in `Image.DECODERS`, don't use this class directly!
  """

  def __init__(self, mode, *args):
    super(YV12Decoder, self).__init__(mode, *args)

  def decode(self, buffer):
    """Decode one planar YV12 frame ([Y][U/4][V/4]) from *buffer*."""
    if self.mode == 'L':
      # discard UV channel
      self.set_as_raw(buffer, 'L')
    else:
      width, height = self.im.size
      luma_size = width * height
      chroma_size = luma_size // 4
      # Split the buffer into its three planes: full-res Y, then the two
      # 2x2-subsampled chroma planes (U first for YV12).
      y_plane = np.frombuffer(buffer, 'uint8', count=luma_size)
      u_plane = np.frombuffer(buffer, 'uint8', count=chroma_size, offset=luma_size)
      v_plane = np.frombuffer(
          buffer, 'uint8', count=chroma_size, offset=luma_size + chroma_size)
      y_plane = y_plane.reshape([height, width])
      # Nearest-neighbour upsample of the chroma planes back to full size.
      row_idx = np.arange(height) // 2
      col_idx = np.arange(width) // 2
      u_full = u_plane.reshape([height // 2, width // 2])[np.ix_(row_idx, col_idx)]
      v_full = v_plane.reshape([height // 2, width // 2])[np.ix_(row_idx, col_idx)]
      interleaved = np.stack([y_plane, u_full, v_full], axis=-1)
      self.set_as_raw(interleaved.flatten().tobytes())
    return -1, 0
class YV21Decoder(ImageFile.PyDecoder):
  """PIL.Image.DECODERS for YV21 format raw bytes

  Registered in `Image.DECODERS`, don't use this class directly!
  """

  def __init__(self, mode, *args):
    super(YV21Decoder, self).__init__(mode, *args)

  def decode(self, buffer):
    """Decode one planar YV21 frame ([Y][V/4][U/4]) from *buffer*."""
    if self.mode == 'L':
      # discard UV channel
      self.set_as_raw(buffer, 'L')
    else:
      width, height = self.im.size
      luma_size = width * height
      chroma_size = luma_size // 4
      # Split the buffer into its three planes: full-res Y, then the two
      # 2x2-subsampled chroma planes (V first for YV21).
      y_plane = np.frombuffer(buffer, 'uint8', count=luma_size)
      v_plane = np.frombuffer(buffer, 'uint8', count=chroma_size, offset=luma_size)
      u_plane = np.frombuffer(
          buffer, 'uint8', count=chroma_size, offset=luma_size + chroma_size)
      y_plane = y_plane.reshape([height, width])
      # Nearest-neighbour upsample of the chroma planes back to full size.
      row_idx = np.arange(height) // 2
      col_idx = np.arange(width) // 2
      u_full = u_plane.reshape([height // 2, width // 2])[np.ix_(row_idx, col_idx)]
      v_full = v_plane.reshape([height // 2, width // 2])[np.ix_(row_idx, col_idx)]
      interleaved = np.stack([y_plane, u_full, v_full], axis=-1)
      self.set_as_raw(interleaved.flatten().tobytes())
    return -1, 0
| 32.314286
| 72
| 0.564103
| 370
| 2,262
| 3.383784
| 0.240541
| 0.022364
| 0.086262
| 0.110224
| 0.761981
| 0.761981
| 0.702875
| 0.702875
| 0.702875
| 0.702875
| 0
| 0.041667
| 0.246684
| 2,262
| 69
| 73
| 32.782609
| 0.693075
| 0.244474
| 0
| 0.761905
| 0
| 0
| 0.020238
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0
| 0.047619
| 0
| 0.238095
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
533b4994773887648b1c8911cc0f063d7b052318
| 39
|
py
|
Python
|
quotes_fetcher/__init__.py
|
daniloruslan/quotes_fetcher
|
26f06bd5f1d16467f70aa4aeff7fb4e360a546c3
|
[
"MIT"
] | null | null | null |
quotes_fetcher/__init__.py
|
daniloruslan/quotes_fetcher
|
26f06bd5f1d16467f70aa4aeff7fb4e360a546c3
|
[
"MIT"
] | null | null | null |
quotes_fetcher/__init__.py
|
daniloruslan/quotes_fetcher
|
26f06bd5f1d16467f70aa4aeff7fb4e360a546c3
|
[
"MIT"
] | null | null | null |
from quotes_fetcher.core import Symbols
| 39
| 39
| 0.897436
| 6
| 39
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 39
| 1
| 39
| 39
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5340029d3627b7a5e4266db2cb07548842dbeae4
| 1,685
|
py
|
Python
|
temboo/core/Library/UnlockPlaces/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/UnlockPlaces/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/UnlockPlaces/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.UnlockPlaces.ClosestMatchSearch import ClosestMatchSearch, ClosestMatchSearchInputSet, ClosestMatchSearchResultSet, ClosestMatchSearchChoreographyExecution
from temboo.Library.UnlockPlaces.FeatureLookup import FeatureLookup, FeatureLookupInputSet, FeatureLookupResultSet, FeatureLookupChoreographyExecution
from temboo.Library.UnlockPlaces.FootprintLookup import FootprintLookup, FootprintLookupInputSet, FootprintLookupResultSet, FootprintLookupChoreographyExecution
from temboo.Library.UnlockPlaces.NameAndFeatureSearch import NameAndFeatureSearch, NameAndFeatureSearchInputSet, NameAndFeatureSearchResultSet, NameAndFeatureSearchChoreographyExecution
from temboo.Library.UnlockPlaces.NameSearch import NameSearch, NameSearchInputSet, NameSearchResultSet, NameSearchChoreographyExecution
from temboo.Library.UnlockPlaces.PostCodeSearch import PostCodeSearch, PostCodeSearchInputSet, PostCodeSearchResultSet, PostCodeSearchChoreographyExecution
from temboo.Library.UnlockPlaces.SpacialFeaturesSearch import SpacialFeaturesSearch, SpacialFeaturesSearchInputSet, SpacialFeaturesSearchResultSet, SpacialFeaturesSearchChoreographyExecution
from temboo.Library.UnlockPlaces.SpacialNameSearch import SpacialNameSearch, SpacialNameSearchInputSet, SpacialNameSearchResultSet, SpacialNameSearchChoreographyExecution
from temboo.Library.UnlockPlaces.SupportedFeatureTypes import SupportedFeatureTypes, SupportedFeatureTypesInputSet, SupportedFeatureTypesResultSet, SupportedFeatureTypesChoreographyExecution
from temboo.Library.UnlockPlaces.UniqueNameSearch import UniqueNameSearch, UniqueNameSearchInputSet, UniqueNameSearchResultSet, UniqueNameSearchChoreographyExecution
| 153.181818
| 190
| 0.922849
| 100
| 1,685
| 15.55
| 0.45
| 0.064309
| 0.109325
| 0.186495
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041543
| 1,685
| 10
| 191
| 168.5
| 0.962848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.1
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
535d68a69d16940d33734f5cec51e4ae2e2828c3
| 66,306
|
py
|
Python
|
google/cloud/apigateway_v1/services/api_gateway_service/async_client.py
|
googleapis/python-api-gateway
|
6f1daac04f6e491e2e817ad5343c64efab5ae5c1
|
[
"Apache-2.0"
] | 1
|
2022-01-24T06:15:23.000Z
|
2022-01-24T06:15:23.000Z
|
google/cloud/apigateway_v1/services/api_gateway_service/async_client.py
|
renovate-bot/python-api-gateway
|
6f1daac04f6e491e2e817ad5343c64efab5ae5c1
|
[
"Apache-2.0"
] | 31
|
2021-03-24T17:40:29.000Z
|
2022-03-07T16:39:46.000Z
|
google/cloud/apigateway_v1/services/api_gateway_service/async_client.py
|
renovate-bot/python-api-gateway
|
6f1daac04f6e491e2e817ad5343c64efab5ae5c1
|
[
"Apache-2.0"
] | 2
|
2021-03-23T18:50:16.000Z
|
2022-01-29T08:07:28.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.apigateway_v1.services.api_gateway_service import pagers
from google.cloud.apigateway_v1.types import apigateway
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import ApiGatewayServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import ApiGatewayServiceGrpcAsyncIOTransport
from .client import ApiGatewayServiceClient
class ApiGatewayServiceAsyncClient:
"""The API Gateway Service is the interface for managing API
Gateways.
"""
# The async client is a thin facade: every attribute below re-exports
# or delegates to the synchronous ApiGatewayServiceClient.
_client: ApiGatewayServiceClient
# Default and mTLS API endpoints, mirrored from the sync client.
DEFAULT_ENDPOINT = ApiGatewayServiceClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = ApiGatewayServiceClient.DEFAULT_MTLS_ENDPOINT
# Resource-path builders/parsers re-exported from the sync client.
api_path = staticmethod(ApiGatewayServiceClient.api_path)
parse_api_path = staticmethod(ApiGatewayServiceClient.parse_api_path)
api_config_path = staticmethod(ApiGatewayServiceClient.api_config_path)
parse_api_config_path = staticmethod(ApiGatewayServiceClient.parse_api_config_path)
gateway_path = staticmethod(ApiGatewayServiceClient.gateway_path)
parse_gateway_path = staticmethod(ApiGatewayServiceClient.parse_gateway_path)
managed_service_path = staticmethod(ApiGatewayServiceClient.managed_service_path)
parse_managed_service_path = staticmethod(
    ApiGatewayServiceClient.parse_managed_service_path
)
service_path = staticmethod(ApiGatewayServiceClient.service_path)
parse_service_path = staticmethod(ApiGatewayServiceClient.parse_service_path)
service_account_path = staticmethod(ApiGatewayServiceClient.service_account_path)
parse_service_account_path = staticmethod(
    ApiGatewayServiceClient.parse_service_account_path
)
# Common resource-path helpers (billing account, folder, organization,
# project, location) shared by all GAPIC clients.
common_billing_account_path = staticmethod(
    ApiGatewayServiceClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
    ApiGatewayServiceClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(ApiGatewayServiceClient.common_folder_path)
parse_common_folder_path = staticmethod(
    ApiGatewayServiceClient.parse_common_folder_path
)
common_organization_path = staticmethod(
    ApiGatewayServiceClient.common_organization_path
)
parse_common_organization_path = staticmethod(
    ApiGatewayServiceClient.parse_common_organization_path
)
common_project_path = staticmethod(ApiGatewayServiceClient.common_project_path)
parse_common_project_path = staticmethod(
    ApiGatewayServiceClient.parse_common_project_path
)
common_location_path = staticmethod(ApiGatewayServiceClient.common_location_path)
parse_common_location_path = staticmethod(
    ApiGatewayServiceClient.parse_common_location_path
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
    """Build a client from service account credentials info.

    Args:
        info (dict): The service account private key info.
        args: Additional arguments to pass to the constructor.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        ApiGatewayServiceAsyncClient: The constructed client.
    """
    # Reuse the sync client's factory, rebinding it to the async class.
    factory = ApiGatewayServiceClient.from_service_account_info.__func__
    return factory(ApiGatewayServiceAsyncClient, info, *args, **kwargs)  # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
    """Build a client from a service account credentials file.

    Args:
        filename (str): The path to the service account private key json
            file.
        args: Additional arguments to pass to the constructor.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        ApiGatewayServiceAsyncClient: The constructed client.
    """
    # Reuse the sync client's factory, rebinding it to the async class.
    factory = ApiGatewayServiceClient.from_service_account_file.__func__
    return factory(ApiGatewayServiceAsyncClient, filename, *args, **kwargs)  # type: ignore

from_service_account_json = from_service_account_file
@property
def transport(self) -> ApiGatewayServiceTransport:
    """The transport instance backing this client.

    Returns:
        ApiGatewayServiceTransport: The transport used by the client instance.
    """
    # The sync client owns the actual transport object.
    return self._client.transport

get_transport_class = functools.partial(
    type(ApiGatewayServiceClient).get_transport_class, type(ApiGatewayServiceClient)
)
def __init__(
    self,
    *,
    credentials: ga_credentials.Credentials = None,
    transport: Union[str, ApiGatewayServiceTransport] = "grpc_asyncio",
    client_options: ClientOptions = None,
    client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
    """Instantiate the api gateway service client.

    Args:
        credentials (Optional[google.auth.credentials.Credentials]): The
            authorization credentials attached to requests; when omitted,
            the client falls back to the ambient environment.
        transport (Union[str, ~.ApiGatewayServiceTransport]): The transport
            to use; ``None`` selects one automatically.
        client_options (ClientOptions): Custom client options (ignored when
            a ``transport`` instance is supplied). ``api_endpoint``
            overrides the default endpoint; the
            GOOGLE_API_USE_MTLS_ENDPOINT environment variable
            ("always"/"never"/"auto") also controls endpoint selection,
            with ``api_endpoint`` taking precedence. When
            GOOGLE_API_USE_CLIENT_CERTIFICATE is "true",
            ``client_cert_source`` supplies the mutual-TLS certificate;
            otherwise the default SSL client certificate is used if
            present.
        client_info (gapic_v1.client_info.ClientInfo): Client metadata
            sent with each request.

    Raises:
        google.auth.exceptions.MutualTlsChannelError: If mutual TLS
            transport creation failed for any reason.
    """
    # All real work — endpoint/mTLS resolution, transport creation —
    # is delegated to the synchronous client.
    self._client = ApiGatewayServiceClient(
        credentials=credentials,
        transport=transport,
        client_options=client_options,
        client_info=client_info,
    )
async def list_gateways(
    self,
    request: Union[apigateway.ListGatewaysRequest, dict] = None,
    *,
    parent: str = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: float = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListGatewaysAsyncPager:
    r"""List Gateways in a given project and location.

    Args:
        request (Union[google.cloud.apigateway_v1.types.ListGatewaysRequest, dict]):
            The request object for ApiGatewayService.ListGateways;
            mutually exclusive with the flattened ``parent`` argument.
        parent (:class:`str`):
            Required. Parent resource of the Gateway, of the form:
            ``projects/*/locations/*``
        retry (google.api_core.retry.Retry): Designation of what errors,
            if any, should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings sent along with the
            request as metadata.

    Returns:
        google.cloud.apigateway_v1.services.api_gateway_service.pagers.ListGatewaysAsyncPager:
            Async pager over the ListGateways responses; iterating it
            resolves additional pages automatically.
    """
    # A full request object and flattened field arguments are mutually
    # exclusive ways of specifying the call.
    flattened_given = any([parent])
    if request is not None and flattened_given:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Coerce dict/None input into a proper request message, then apply
    # any flattened fields.
    request = apigateway.ListGatewaysRequest(request)
    if parent is not None:
        request.parent = parent

    # Bind retry/timeout handling onto the raw transport method.
    call = gapic_v1.method_async.wrap_method(
        self._client._transport.list_gateways,
        default_timeout=None,
        client_info=DEFAULT_CLIENT_INFO,
    )

    # The parent resource name is routed via the metadata header.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
    )

    # Issue the RPC.
    response = await call(request, retry=retry, timeout=timeout, metadata=metadata,)

    # This method is paged; expose the result through an async pager
    # that provides `__aiter__`.
    return pagers.ListGatewaysAsyncPager(
        method=call, request=request, response=response, metadata=metadata,
    )
async def get_gateway(
    self,
    request: Union[apigateway.GetGatewayRequest, dict] = None,
    *,
    name: str = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: float = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> apigateway.Gateway:
    r"""Get details of a single Gateway.

    Args:
        request (Union[google.cloud.apigateway_v1.types.GetGatewayRequest, dict]):
            The request object for ApiGatewayService.GetGateway;
            mutually exclusive with the flattened ``name`` argument.
        name (:class:`str`):
            Required. Resource name of the form:
            ``projects/*/locations/*/gateways/*``
        retry (google.api_core.retry.Retry): Designation of what errors,
            if any, should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings sent along with the
            request as metadata.

    Returns:
        google.cloud.apigateway_v1.types.Gateway:
            The requested Gateway. A Gateway is an API-aware HTTP proxy
            performing API-Method and/or API-Consumer specific actions
            (authentication, policy enforcement, backend selection)
            based on an API Config.
    """
    # A full request object and flattened field arguments are mutually
    # exclusive ways of specifying the call.
    flattened_given = any([name])
    if request is not None and flattened_given:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Coerce dict/None input into a proper request message, then apply
    # any flattened fields.
    request = apigateway.GetGatewayRequest(request)
    if name is not None:
        request.name = name

    # Bind retry/timeout handling onto the raw transport method.
    call = gapic_v1.method_async.wrap_method(
        self._client._transport.get_gateway,
        default_timeout=None,
        client_info=DEFAULT_CLIENT_INFO,
    )

    # The resource name is routed via the metadata header.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
    )

    # Issue the RPC and hand back the Gateway message.
    return await call(request, retry=retry, timeout=timeout, metadata=metadata,)
async def create_gateway(
    self,
    request: Union[apigateway.CreateGatewayRequest, dict] = None,
    *,
    parent: str = None,
    gateway: apigateway.Gateway = None,
    gateway_id: str = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: float = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
    r"""Create a new Gateway in a given project and location.

    Args:
        request (Union[google.cloud.apigateway_v1.types.CreateGatewayRequest, dict]):
            The request object for ApiGatewayService.CreateGateway;
            mutually exclusive with the flattened field arguments.
        parent (:class:`str`):
            Required. Parent resource of the Gateway, of the form:
            ``projects/*/locations/*``
        gateway (:class:`google.cloud.apigateway_v1.types.Gateway`):
            Required. Gateway resource.
        gateway_id (:class:`str`):
            Required. Identifier to assign to the Gateway. Must be
            unique within scope of the parent resource.
        retry (google.api_core.retry.Retry): Designation of what errors,
            if any, should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings sent along with the
            request as metadata.

    Returns:
        google.api_core.operation_async.AsyncOperation:
            A long-running operation whose result is the created
            :class:`google.cloud.apigateway_v1.types.Gateway`.
    """
    # A full request object and flattened field arguments are mutually
    # exclusive ways of specifying the call.
    flattened_given = any([parent, gateway, gateway_id])
    if request is not None and flattened_given:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Coerce dict/None input into a proper request message, then apply
    # any flattened fields.
    request = apigateway.CreateGatewayRequest(request)
    if parent is not None:
        request.parent = parent
    if gateway is not None:
        request.gateway = gateway
    if gateway_id is not None:
        request.gateway_id = gateway_id

    # Bind retry/timeout handling onto the raw transport method; the
    # default retry policy covers transient Unavailable/Unknown errors.
    call = gapic_v1.method_async.wrap_method(
        self._client._transport.create_gateway,
        default_retry=retries.Retry(
            initial=1.0,
            maximum=60.0,
            multiplier=2,
            predicate=retries.if_exception_type(
                core_exceptions.ServiceUnavailable, core_exceptions.Unknown,
            ),
            deadline=60.0,
        ),
        default_timeout=60.0,
        client_info=DEFAULT_CLIENT_INFO,
    )

    # The parent resource name is routed via the metadata header.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
    )

    # Issue the RPC.
    raw_operation = await call(
        request, retry=retry, timeout=timeout, metadata=metadata,
    )

    # Wrap the raw operation so its eventual result deserializes into a
    # Gateway message.
    return operation_async.from_gapic(
        raw_operation,
        self._client._transport.operations_client,
        apigateway.Gateway,
        metadata_type=apigateway.OperationMetadata,
    )
async def update_gateway(
    self,
    request: Union[apigateway.UpdateGatewayRequest, dict] = None,
    *,
    gateway: apigateway.Gateway = None,
    update_mask: field_mask_pb2.FieldMask = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: float = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
    r"""Update the parameters of a single Gateway.

    Args:
        request (Union[google.cloud.apigateway_v1.types.UpdateGatewayRequest, dict]):
            The request object for ApiGatewayService.UpdateGateway;
            mutually exclusive with the flattened field arguments.
        gateway (:class:`google.cloud.apigateway_v1.types.Gateway`):
            Required. Gateway resource.
        update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
            Field mask selecting which Gateway fields the update
            overwrites. Paths are relative to the resource, not the full
            request; a field is overwritten if it is in the mask, and all
            fields are overwritten when no mask is supplied.
        retry (google.api_core.retry.Retry): Designation of what errors,
            if any, should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings sent along with the
            request as metadata.

    Returns:
        google.api_core.operation_async.AsyncOperation:
            A long-running operation whose result is the updated
            :class:`google.cloud.apigateway_v1.types.Gateway`.
    """
    # A full request object and flattened field arguments are mutually
    # exclusive ways of specifying the call.
    flattened_given = any([gateway, update_mask])
    if request is not None and flattened_given:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Coerce dict/None input into a proper request message, then apply
    # any flattened fields.
    request = apigateway.UpdateGatewayRequest(request)
    if gateway is not None:
        request.gateway = gateway
    if update_mask is not None:
        request.update_mask = update_mask

    # Bind retry/timeout handling onto the raw transport method; the
    # default retry policy covers transient Unavailable/Unknown errors.
    call = gapic_v1.method_async.wrap_method(
        self._client._transport.update_gateway,
        default_retry=retries.Retry(
            initial=1.0,
            maximum=60.0,
            multiplier=2,
            predicate=retries.if_exception_type(
                core_exceptions.ServiceUnavailable, core_exceptions.Unknown,
            ),
            deadline=60.0,
        ),
        default_timeout=60.0,
        client_info=DEFAULT_CLIENT_INFO,
    )

    # Routing is by the nested resource name of the updated Gateway.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata(
            (("gateway.name", request.gateway.name),)
        ),
    )

    # Issue the RPC.
    raw_operation = await call(
        request, retry=retry, timeout=timeout, metadata=metadata,
    )

    # Wrap the raw operation so its eventual result deserializes into a
    # Gateway message.
    return operation_async.from_gapic(
        raw_operation,
        self._client._transport.operations_client,
        apigateway.Gateway,
        metadata_type=apigateway.OperationMetadata,
    )
async def delete_gateway(
    self,
    request: Union[apigateway.DeleteGatewayRequest, dict] = None,
    *,
    name: str = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: float = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
    r"""Delete a single Gateway.

    Args:
        request (Union[google.cloud.apigateway_v1.types.DeleteGatewayRequest, dict]):
            The request object for ApiGatewayService.DeleteGateway;
            mutually exclusive with the flattened ``name`` argument.
        name (:class:`str`):
            Required. Resource name of the form:
            ``projects/*/locations/*/gateways/*``
        retry (google.api_core.retry.Retry): Designation of what errors,
            if any, should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings sent along with the
            request as metadata.

    Returns:
        google.api_core.operation_async.AsyncOperation:
            A long-running operation whose result type is
            :class:`google.protobuf.empty_pb2.Empty`.
    """
    # A full request object and flattened field arguments are mutually
    # exclusive ways of specifying the call.
    flattened_given = any([name])
    if request is not None and flattened_given:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Coerce dict/None input into a proper request message, then apply
    # any flattened fields.
    request = apigateway.DeleteGatewayRequest(request)
    if name is not None:
        request.name = name

    # Bind retry/timeout handling onto the raw transport method; the
    # default retry policy covers transient Unavailable/Unknown errors.
    call = gapic_v1.method_async.wrap_method(
        self._client._transport.delete_gateway,
        default_retry=retries.Retry(
            initial=1.0,
            maximum=60.0,
            multiplier=2,
            predicate=retries.if_exception_type(
                core_exceptions.ServiceUnavailable, core_exceptions.Unknown,
            ),
            deadline=60.0,
        ),
        default_timeout=60.0,
        client_info=DEFAULT_CLIENT_INFO,
    )

    # The resource name is routed via the metadata header.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
    )

    # Issue the RPC.
    raw_operation = await call(
        request, retry=retry, timeout=timeout, metadata=metadata,
    )

    # Wrap the raw operation; a successful delete resolves to Empty.
    return operation_async.from_gapic(
        raw_operation,
        self._client._transport.operations_client,
        empty_pb2.Empty,
        metadata_type=apigateway.OperationMetadata,
    )
async def list_apis(
    self,
    request: Union[apigateway.ListApisRequest, dict] = None,
    *,
    parent: str = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: float = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListApisAsyncPager:
    r"""List Apis in a given project and location.

    Args:
        request (Union[google.cloud.apigateway_v1.types.ListApisRequest, dict]):
            The request object for ApiGatewayService.ListApis; mutually
            exclusive with the flattened ``parent`` argument.
        parent (:class:`str`):
            Required. Parent resource of the API, of the form:
            ``projects/*/locations/global``
        retry (google.api_core.retry.Retry): Designation of what errors,
            if any, should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings sent along with the
            request as metadata.

    Returns:
        google.cloud.apigateway_v1.services.api_gateway_service.pagers.ListApisAsyncPager:
            Async pager over the ListApis responses; iterating it
            resolves additional pages automatically.
    """
    # A full request object and flattened field arguments are mutually
    # exclusive ways of specifying the call.
    flattened_given = any([parent])
    if request is not None and flattened_given:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Coerce dict/None input into a proper request message, then apply
    # any flattened fields.
    request = apigateway.ListApisRequest(request)
    if parent is not None:
        request.parent = parent

    # Bind retry/timeout handling onto the raw transport method.
    call = gapic_v1.method_async.wrap_method(
        self._client._transport.list_apis,
        default_timeout=None,
        client_info=DEFAULT_CLIENT_INFO,
    )

    # The parent resource name is routed via the metadata header.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
    )

    # Issue the RPC.
    response = await call(request, retry=retry, timeout=timeout, metadata=metadata,)

    # This method is paged; expose the result through an async pager
    # that provides `__aiter__`.
    return pagers.ListApisAsyncPager(
        method=call, request=request, response=response, metadata=metadata,
    )
async def get_api(
    self,
    request: Union[apigateway.GetApiRequest, dict] = None,
    *,
    name: str = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: float = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> apigateway.Api:
    r"""Get details of a single Api.

    Args:
        request (Union[google.cloud.apigateway_v1.types.GetApiRequest, dict]):
            The request object for ApiGatewayService.GetApi; mutually
            exclusive with the flattened ``name`` argument.
        name (:class:`str`):
            Required. Resource name of the form:
            ``projects/*/locations/global/apis/*``
        retry (google.api_core.retry.Retry): Designation of what errors,
            if any, should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings sent along with the
            request as metadata.

    Returns:
        google.cloud.apigateway_v1.types.Api:
            The requested API, which can be served by one or more
            Gateways.
    """
    # A full request object and flattened field arguments are mutually
    # exclusive ways of specifying the call.
    flattened_given = any([name])
    if request is not None and flattened_given:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Coerce dict/None input into a proper request message, then apply
    # any flattened fields.
    request = apigateway.GetApiRequest(request)
    if name is not None:
        request.name = name

    # Bind retry/timeout handling onto the raw transport method.
    call = gapic_v1.method_async.wrap_method(
        self._client._transport.get_api,
        default_timeout=None,
        client_info=DEFAULT_CLIENT_INFO,
    )

    # The resource name is routed via the metadata header.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
    )

    # Issue the RPC and hand back the Api message.
    return await call(request, retry=retry, timeout=timeout, metadata=metadata,)
    async def create_api(
        self,
        request: Union[apigateway.CreateApiRequest, dict] = None,
        *,
        parent: str = None,
        api: apigateway.Api = None,
        api_id: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Creates a new Api in a given project and location.

        Args:
            request (Union[google.cloud.apigateway_v1.types.CreateApiRequest, dict]):
                The request object. Request message for
                ApiGatewayService.CreateApi.
            parent (:class:`str`):
                Required. Parent resource of the API, of the form:
                ``projects/*/locations/global``

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            api (:class:`google.cloud.apigateway_v1.types.Api`):
                Required. API resource.

                This corresponds to the ``api`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            api_id (:class:`str`):
                Required. Identifier to assign to the
                API. Must be unique within scope of the
                parent resource.

                This corresponds to the ``api_id`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.apigateway_v1.types.Api` An API
                that can be served by one or more Gateways.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, api, api_id])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # The proto-plus constructor accepts a message, a dict, or None
        # (which yields an empty request).
        request = apigateway.CreateApiRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent
        if api is not None:
            request.api = api
        if api_id is not None:
            request.api_id = api_id
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.  Transient ServiceUnavailable/Unknown
        # errors are retried with exponential backoff (1s initial, x2,
        # capped at 60s).
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.create_api,
            default_retry=retries.Retry(
                initial=1.0,
                maximum=60.0,
                multiplier=2,
                predicate=retries.if_exception_type(
                    core_exceptions.ServiceUnavailable, core_exceptions.Unknown,
                ),
                deadline=60.0,
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )
        # Certain fields should be provided within the metadata header;
        # add these here.  The routing header tells the backend which
        # parent resource the request targets.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )
        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the response in an operation future so callers can await the
        # long-running operation's final Api result.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            apigateway.Api,
            metadata_type=apigateway.OperationMetadata,
        )
        # Done; return the response.
        return response
    async def update_api(
        self,
        request: Union[apigateway.UpdateApiRequest, dict] = None,
        *,
        api: apigateway.Api = None,
        update_mask: field_mask_pb2.FieldMask = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Updates the parameters of a single Api.

        Args:
            request (Union[google.cloud.apigateway_v1.types.UpdateApiRequest, dict]):
                The request object. Request message for
                ApiGatewayService.UpdateApi.
            api (:class:`google.cloud.apigateway_v1.types.Api`):
                Required. API resource.

                This corresponds to the ``api`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
                Field mask is used to specify the fields to be
                overwritten in the Api resource by the update. The
                fields specified in the update_mask are relative to the
                resource, not the full request. A field will be
                overwritten if it is in the mask. If the user does not
                provide a mask then all fields will be overwritten.

                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.apigateway_v1.types.Api` An API
                that can be served by one or more Gateways.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([api, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        request = apigateway.UpdateApiRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if api is not None:
            request.api = api
        if update_mask is not None:
            request.update_mask = update_mask
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.  Transient ServiceUnavailable/Unknown
        # errors are retried with exponential backoff.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.update_api,
            default_retry=retries.Retry(
                initial=1.0,
                maximum=60.0,
                multiplier=2,
                predicate=retries.if_exception_type(
                    core_exceptions.ServiceUnavailable, core_exceptions.Unknown,
                ),
                deadline=60.0,
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )
        # Certain fields should be provided within the metadata header;
        # add these here.  Updates route by the name of the Api being
        # modified, so the header key is "api.name".
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("api.name", request.api.name),)),
        )
        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the response in an operation future resolving to the updated Api.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            apigateway.Api,
            metadata_type=apigateway.OperationMetadata,
        )
        # Done; return the response.
        return response
    async def delete_api(
        self,
        request: Union[apigateway.DeleteApiRequest, dict] = None,
        *,
        name: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Deletes a single Api.

        Args:
            request (Union[google.cloud.apigateway_v1.types.DeleteApiRequest, dict]):
                The request object. Request message for
                ApiGatewayService.DeleteApi.
            name (:class:`str`):
                Required. Resource name of the form:
                ``projects/*/locations/global/apis/*``

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.protobuf.empty_pb2.Empty` A generic empty
                message that you can re-use to avoid defining duplicated
                empty messages in your APIs. A typical example is to
                use it as the request or the response type of an API
                method. For instance::

                    service Foo {
                      rpc Bar(google.protobuf.Empty) returns
                      (google.protobuf.Empty);
                    }

                The JSON representation for Empty is empty JSON
                object {}.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        request = apigateway.DeleteApiRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.  Transient ServiceUnavailable/Unknown
        # errors are retried with exponential backoff.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.delete_api,
            default_retry=retries.Retry(
                initial=1.0,
                maximum=60.0,
                multiplier=2,
                predicate=retries.if_exception_type(
                    core_exceptions.ServiceUnavailable, core_exceptions.Unknown,
                ),
                deadline=60.0,
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )
        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the response in an operation future; a delete resolves to
        # google.protobuf.Empty rather than a resource message.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            empty_pb2.Empty,
            metadata_type=apigateway.OperationMetadata,
        )
        # Done; return the response.
        return response
async def list_api_configs(
self,
request: Union[apigateway.ListApiConfigsRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListApiConfigsAsyncPager:
r"""Lists ApiConfigs in a given project and location.
Args:
request (Union[google.cloud.apigateway_v1.types.ListApiConfigsRequest, dict]):
The request object. Request message for
ApiGatewayService.ListApiConfigs
parent (:class:`str`):
Required. Parent resource of the API Config, of the
form: ``projects/*/locations/global/apis/*``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.apigateway_v1.services.api_gateway_service.pagers.ListApiConfigsAsyncPager:
Response message for
ApiGatewayService.ListApiConfigs
Iterating over this object will yield
results and resolve additional pages
automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = apigateway.ListApiConfigsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_api_configs,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListApiConfigsAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def get_api_config(
self,
request: Union[apigateway.GetApiConfigRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> apigateway.ApiConfig:
r"""Gets details of a single ApiConfig.
Args:
request (Union[google.cloud.apigateway_v1.types.GetApiConfigRequest, dict]):
The request object. Request message for
ApiGatewayService.GetApiConfig
name (:class:`str`):
Required. Resource name of the form:
``projects/*/locations/global/apis/*/configs/*``
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.apigateway_v1.types.ApiConfig:
An API Configuration is a combination
of settings for both the Managed Service
and Gateways serving this API Config.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = apigateway.GetApiConfigRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_api_config,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
    async def create_api_config(
        self,
        request: Union[apigateway.CreateApiConfigRequest, dict] = None,
        *,
        parent: str = None,
        api_config: apigateway.ApiConfig = None,
        api_config_id: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Creates a new ApiConfig in a given project and
        location.

        Args:
            request (Union[google.cloud.apigateway_v1.types.CreateApiConfigRequest, dict]):
                The request object. Request message for
                ApiGatewayService.CreateApiConfig.
            parent (:class:`str`):
                Required. Parent resource of the API Config, of the
                form: ``projects/*/locations/global/apis/*``

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            api_config (:class:`google.cloud.apigateway_v1.types.ApiConfig`):
                Required. API resource.

                This corresponds to the ``api_config`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            api_config_id (:class:`str`):
                Required. Identifier to assign to the
                API Config. Must be unique within scope
                of the parent resource.

                This corresponds to the ``api_config_id`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.apigateway_v1.types.ApiConfig` An API
                Configuration is a combination of settings for both the
                Managed Service and Gateways serving this API Config.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, api_config, api_config_id])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        request = apigateway.CreateApiConfigRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent
        if api_config is not None:
            request.api_config = api_config
        if api_config_id is not None:
            request.api_config_id = api_config_id
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.  Transient ServiceUnavailable/Unknown
        # errors are retried with exponential backoff.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.create_api_config,
            default_retry=retries.Retry(
                initial=1.0,
                maximum=60.0,
                multiplier=2,
                predicate=retries.if_exception_type(
                    core_exceptions.ServiceUnavailable, core_exceptions.Unknown,
                ),
                deadline=60.0,
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )
        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the response in an operation future resolving to the created
        # ApiConfig.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            apigateway.ApiConfig,
            metadata_type=apigateway.OperationMetadata,
        )
        # Done; return the response.
        return response
    async def update_api_config(
        self,
        request: Union[apigateway.UpdateApiConfigRequest, dict] = None,
        *,
        api_config: apigateway.ApiConfig = None,
        update_mask: field_mask_pb2.FieldMask = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Updates the parameters of a single ApiConfig.

        Args:
            request (Union[google.cloud.apigateway_v1.types.UpdateApiConfigRequest, dict]):
                The request object. Request message for
                ApiGatewayService.UpdateApiConfig.
            api_config (:class:`google.cloud.apigateway_v1.types.ApiConfig`):
                Required. API Config resource.

                This corresponds to the ``api_config`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
                Field mask is used to specify the fields to be
                overwritten in the ApiConfig resource by the update. The
                fields specified in the update_mask are relative to the
                resource, not the full request. A field will be
                overwritten if it is in the mask. If the user does not
                provide a mask then all fields will be overwritten.

                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.apigateway_v1.types.ApiConfig` An API
                Configuration is a combination of settings for both the
                Managed Service and Gateways serving this API Config.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([api_config, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        request = apigateway.UpdateApiConfigRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if api_config is not None:
            request.api_config = api_config
        if update_mask is not None:
            request.update_mask = update_mask
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.  Transient ServiceUnavailable/Unknown
        # errors are retried with exponential backoff.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.update_api_config,
            default_retry=retries.Retry(
                initial=1.0,
                maximum=60.0,
                multiplier=2,
                predicate=retries.if_exception_type(
                    core_exceptions.ServiceUnavailable, core_exceptions.Unknown,
                ),
                deadline=60.0,
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )
        # Certain fields should be provided within the metadata header;
        # add these here.  Updates route by the name of the ApiConfig
        # being modified, so the header key is "api_config.name".
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("api_config.name", request.api_config.name),)
            ),
        )
        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the response in an operation future resolving to the updated
        # ApiConfig.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            apigateway.ApiConfig,
            metadata_type=apigateway.OperationMetadata,
        )
        # Done; return the response.
        return response
    async def delete_api_config(
        self,
        request: Union[apigateway.DeleteApiConfigRequest, dict] = None,
        *,
        name: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Deletes a single ApiConfig.

        Args:
            request (Union[google.cloud.apigateway_v1.types.DeleteApiConfigRequest, dict]):
                The request object. Request message for
                ApiGatewayService.DeleteApiConfig.
            name (:class:`str`):
                Required. Resource name of the form:
                ``projects/*/locations/global/apis/*/configs/*``

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.protobuf.empty_pb2.Empty` A generic empty
                message that you can re-use to avoid defining duplicated
                empty messages in your APIs. A typical example is to
                use it as the request or the response type of an API
                method. For instance::

                    service Foo {
                      rpc Bar(google.protobuf.Empty) returns
                      (google.protobuf.Empty);
                    }

                The JSON representation for Empty is empty JSON
                object {}.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        request = apigateway.DeleteApiConfigRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.  Transient ServiceUnavailable/Unknown
        # errors are retried with exponential backoff.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.delete_api_config,
            default_retry=retries.Retry(
                initial=1.0,
                maximum=60.0,
                multiplier=2,
                predicate=retries.if_exception_type(
                    core_exceptions.ServiceUnavailable, core_exceptions.Unknown,
                ),
                deadline=60.0,
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )
        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )
        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the response in an operation future; a delete resolves to
        # google.protobuf.Empty rather than a resource message.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            empty_pb2.Empty,
            metadata_type=apigateway.OperationMetadata,
        )
        # Done; return the response.
        return response
    async def __aenter__(self):
        """Enter the async context manager; the client itself is the context."""
        return self
    async def __aexit__(self, exc_type, exc, tb):
        """Exit the async context manager, closing the underlying transport."""
        await self.transport.close()
# Client-info metadata attached to outgoing requests.  Prefer reporting the
# installed distribution's version; fall back to an unversioned ClientInfo
# when the package metadata is unavailable (e.g. running from source).
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-api-gateway",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
# Public names exported by this module.
__all__ = ("ApiGatewayServiceAsyncClient",)
| 41.081784
| 180
| 0.610759
| 7,263
| 66,306
| 5.463858
| 0.058378
| 0.029483
| 0.011793
| 0.020285
| 0.85639
| 0.81451
| 0.801709
| 0.796996
| 0.771041
| 0.766934
| 0
| 0.004877
| 0.319684
| 66,306
| 1,613
| 181
| 41.107254
| 0.874859
| 0.163696
| 0
| 0.635605
| 0
| 0
| 0.04949
| 0.001672
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005563
| false
| 0
| 0.029207
| 0
| 0.100139
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5366fe151e10b6906902e58ab4d0b45651eb5a03
| 5,936
|
py
|
Python
|
test/integration/ggrc/converters/test_import_risk_assessment.py
|
pbedn/ggrc-core
|
12ae4720a430730835f1d02def62c0f6ef453521
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-08-26T06:56:01.000Z
|
2021-07-08T13:56:20.000Z
|
test/integration/ggrc/converters/test_import_risk_assessment.py
|
pbedn/ggrc-core
|
12ae4720a430730835f1d02def62c0f6ef453521
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2021-02-02T23:04:30.000Z
|
2022-03-02T09:54:47.000Z
|
test/integration/ggrc/converters/test_import_risk_assessment.py
|
pbedn/ggrc-core
|
12ae4720a430730835f1d02def62c0f6ef453521
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2016-08-23T10:51:19.000Z
|
2016-08-23T10:51:19.000Z
|
# Copyright (C) 2019 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Tests for Risk Assessment import."""
from collections import OrderedDict
import datetime
import ddt
from ggrc.converters import errors
from ggrc.models import all_models
from integration.ggrc import TestCase
from integration.ggrc.models import factories
@ddt.ddt
class TestRiskAssessmentImport(TestCase):
  """Risk Assessment Import Test Class"""

  def _import_risk_assessment(self, role_column, role_value,
                              expected_warnings):
    """Import one Risk Assessment row with a single role column set.

    Creates a program, a risk assessment and a known valid user
    (valid_user@example.com), imports a CSV row with ``role_column`` set to
    ``role_value``, asserts the import response contains exactly
    ``expected_warnings`` as row warnings, and returns the re-queried
    RiskAssessment instance.
    """
    with factories.single_commit():
      program = factories.ProgramFactory()
      risk_assessment = factories.RiskAssessmentFactory(program=program)
      factories.PersonFactory(email="valid_user@example.com")
    data = OrderedDict([
        ("object_type", "RiskAssessment"),
        ("code", risk_assessment.slug),
        ("program", program.slug),
        ("title", "RA-1"),
        ("start date", datetime.date(2018, 10, 22)),
        ("end date", datetime.date(2018, 10, 31)),
        (role_column, role_value),
    ])
    expected_messages = {
        "Risk Assessment": {
            "row_warnings": set(expected_warnings),
        },
    }
    response = self.import_data(data)
    self._check_csv_response(response, expected_messages)
    return all_models.RiskAssessment.query.one()

  @ddt.data(
      ("valid_user@example.com,", []),
      ("user2@example.com,\nvalid_user@example.com",
       [errors.MULTIPLE_ASSIGNEES.format(line=3, column_name="Risk Counsel")]),
  )
  @ddt.unpack
  def test_ra_import_counsels(self, counsel, expected_warnings):
    """Tests Risk Counsel for Risk Assessment imported and set correctly"""
    risk_assessment = self._import_risk_assessment(
        "risk counsel", counsel, expected_warnings)
    self.assertEqual(risk_assessment.ra_counsel.email,
                     "valid_user@example.com")

  @ddt.data(
      (" ;,", []),
      ("user2@example.com;\nuser3@example.com",
       [
           errors.MULTIPLE_ASSIGNEES.format(line=3,
                                            column_name="Risk Counsel"),
           errors.UNKNOWN_USER_WARNING.format(line=3,
                                              email="user2@example.com"),
           errors.UNKNOWN_USER_WARNING.format(line=3,
                                              email="user3@example.com"),
       ]),
  )
  @ddt.unpack
  def test_ra_import_wrong_counsels(self, counsel, expected_warnings):
    """Test import Risk Assessment counsel failed"""
    risk_assessment = self._import_risk_assessment(
        "risk counsel", counsel, expected_warnings)
    # No valid counsel address was supplied, so the role stays unset.
    self.assertFalse(risk_assessment.ra_counsel)

  @ddt.data(
      ("valid_user@example.com", []),
      ("user2@example.com\nvalid_user@example.com",
       [errors.MULTIPLE_ASSIGNEES.format(line=3, column_name="Risk Manager")]),
  )
  @ddt.unpack
  def test_ra_import_managers(self, manager, expected_warnings):
    """Tests Risk Manager for Risk Assessment imported and set correctly"""
    risk_assessment = self._import_risk_assessment(
        "risk manager", manager, expected_warnings)
    self.assertEqual(risk_assessment.ra_manager.email,
                     "valid_user@example.com")

  @ddt.data(
      ("", []),
      ("user2@example.com\nuser3@example.com", [
          errors.MULTIPLE_ASSIGNEES.format(line=3,
                                           column_name="Risk Manager"),
          errors.UNKNOWN_USER_WARNING.format(line=3,
                                             email="user2@example.com"),
          errors.UNKNOWN_USER_WARNING.format(line=3,
                                             email="user3@example.com"),
      ]),
  )
  @ddt.unpack
  def test_ra_import_wrong_managers(self, manager, expected_warnings):
    """Test import Risk Assessment manager failed"""
    risk_assessment = self._import_risk_assessment(
        "risk manager", manager, expected_warnings)
    # No valid manager address was supplied, so the role stays unset.
    self.assertFalse(risk_assessment.ra_manager)
| 35.54491
| 79
| 0.628201
| 621
| 5,936
| 5.829308
| 0.165862
| 0.100552
| 0.038674
| 0.041989
| 0.877348
| 0.847514
| 0.812155
| 0.812155
| 0.812155
| 0.812155
| 0
| 0.020449
| 0.242082
| 5,936
| 166
| 80
| 35.759036
| 0.784174
| 0.066375
| 0
| 0.7
| 0
| 0
| 0.15608
| 0.060436
| 0
| 0
| 0
| 0
| 0.028571
| 1
| 0.028571
| false
| 0
| 0.114286
| 0
| 0.15
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5368a5fbbb41a43ea371f5a09a044a825fcc9c43
| 7,000
|
py
|
Python
|
admin_ip_whitelist/tests.py
|
dvska/django-admin-ip-whitelist
|
6692667808d7dd7774a06a9e3cba1bc82cacb32f
|
[
"Apache-1.1"
] | 12
|
2015-02-19T14:58:04.000Z
|
2021-11-29T13:41:04.000Z
|
admin_ip_whitelist/tests.py
|
dvska/django-admin-ip-whitelist
|
6692667808d7dd7774a06a9e3cba1bc82cacb32f
|
[
"Apache-1.1"
] | 6
|
2016-06-28T13:57:37.000Z
|
2018-06-22T18:22:24.000Z
|
admin_ip_whitelist/tests.py
|
dvska/django-admin-ip-whitelist
|
6692667808d7dd7774a06a9e3cba1bc82cacb32f
|
[
"Apache-1.1"
] | 12
|
2015-07-14T10:16:02.000Z
|
2021-08-15T16:44:41.000Z
|
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.test import TestCase, override_settings
from testfixtures import LogCapture, log_capture
from .models import ADMIN_ACCESS_WHITELIST_PREFIX, DjangoAdminAccessIPWhitelist
class MiddlewareTests(TestCase):
    """Request-level tests for the admin IP whitelist middleware.

    Each test spoofs the client address via ``REMOTE_ADDR`` (and optionally
    ``HTTP_X_FORWARDED_FOR``) and inspects both the HTTP response and the
    middleware's DEBUG log output.

    NOTE(review): written for Python 2 / old Django — ``assertEquals`` is the
    deprecated alias and response bodies compare against native str;
    ``django.core.urlresolvers`` moved to ``django.urls`` in Django 2.0.
    """
    def tearDown(self):
        # Whitelist entries are mirrored into the cache (see ModelTests);
        # clear it so one test's entries cannot leak into the next.
        cache.clear()
    def test_other_view(self):
        # Non-admin views are not protected: any IP gets through.
        other_url = reverse('test')
        response = self.client.get(other_url, REMOTE_ADDR="5.5.5.5")
        self.assertEquals(response.status_code, 200)
        self.assertEquals(response.content, 'Hello, World!')
    def test_denied(self):
        # With no whitelist entries at all, admin access is denied outright.
        admin_url = reverse('admin:index')
        with LogCapture() as l:
            response = self.client.get(admin_url, REMOTE_ADDR="5.5.5.5")
            expected_response = "You are banned.\n<!-- 5.5.5.5 -->"
            self.assertEquals(response.status_code, 403) # forbidden
            self.assertEquals(response.content, expected_response)
            self.assertEquals(response['content-type'], 'text/html')
            module_name = 'admin_ip_whitelist.middleware'
            # check() asserts the exact, ordered sequence of log records.
            l.check(
                (module_name, "DEBUG", "[django-admin-ip-whitelist] status = enabled"),
                (module_name, "DEBUG", "GOT IP FROM Request: 5.5.5.5 and User Agent None"),
            )
    @override_settings(ADMIN_ACCESS_WHITELIST_MESSAGE='Leave, now.')
    def test_denied_custom_message(self):
        # The ban message is configurable through settings.
        admin_url = reverse('admin:index')
        with LogCapture() as l:
            response = self.client.get(admin_url, REMOTE_ADDR="5.5.5.5")
            expected_response = "Leave, now.\n<!-- 5.5.5.5 -->"
            self.assertEquals(response.status_code, 403) # forbidden
            self.assertEquals(response.content, expected_response)
            self.assertEquals(response['content-type'], 'text/html')
            module_name = 'admin_ip_whitelist.middleware'
            l.check(
                (module_name, "DEBUG", "[django-admin-ip-whitelist] status = enabled"),
                (module_name, "DEBUG", "GOT IP FROM Request: 5.5.5.5 and User Agent None"),
            )
    @override_settings(ADMIN_ACCESS_WHITELIST_USE_HTTP_X_FORWARDED_FOR=True)
    @log_capture()
    def test_http_x_forward_for(self, l):
        # With the setting on, the first X-Forwarded-For hop is considered;
        # without the header, the middleware falls back to REMOTE_ADDR
        # (both log checks below show IP 1.2.3.4 being matched).
        DjangoAdminAccessIPWhitelist.objects.create(
            whitelist_reason='You are special',
            ip='1.2.3.4',
        )
        admin_url = reverse('admin:index')
        # Allowed, the FORWARDED address is being considered.
        response = self.client.get(
            admin_url, REMOTE_ADDR="5.5.5.5",
            HTTP_X_FORWARDED_FOR="1.2.3.4, 4.4.4.4, 3.3.3.3")
        self.assertEquals(response.status_code, 302) # redirect
        expected_url = "{}?next={}".format(reverse('admin:login'), admin_url)
        self.assertEquals(response.url, expected_url)
        # Allowed, If no forwarded address is given, it falls back
        # to REMOTE_ADDR.
        response = self.client.get(
            admin_url, REMOTE_ADDR="1.2.3.4")
        self.assertEquals(response.status_code, 302) # redirect
        expected_url = "{}?next={}".format(reverse('admin:login'), admin_url)
        self.assertEquals(response.url, expected_url)
        module_name = 'admin_ip_whitelist.middleware'
        l.check(
            (module_name, "DEBUG", "[django-admin-ip-whitelist] status = enabled"),
            (module_name, "DEBUG", "GOT IP FROM Request: 1.2.3.4 and User Agent None"),
            (module_name, "DEBUG", "/Admin access IP: DJANGO_ADMIN_ACCESS_WHITELIST:1.2.3.4"),
            (module_name, "DEBUG", "GOT IP FROM Request: 1.2.3.4 and User Agent None"),
            (module_name, "DEBUG", "/Admin access IP: DJANGO_ADMIN_ACCESS_WHITELIST:1.2.3.4"),
        )
    @log_capture()
    def test_allowed(self, l):
        # A whitelisted IP reaches admin (302 to login); others still 403.
        DjangoAdminAccessIPWhitelist.objects.create(
            whitelist_reason='You are special',
            ip='1.2.3.4',
        )
        admin_url = reverse('admin:index')
        # This user is not allowed.
        response = self.client.get(admin_url, REMOTE_ADDR="5.5.5.5")
        expected_response = "You are banned.\n<!-- 5.5.5.5 -->"
        self.assertEquals(response.status_code, 403) # forbidden
        self.assertEquals(response.content, expected_response)
        self.assertEquals(response['content-type'], 'text/html')
        # This user is special.
        response = self.client.get(admin_url, REMOTE_ADDR="1.2.3.4")
        self.assertEquals(response.status_code, 302) # redirect
        expected_url = "{}?next={}".format(reverse('admin:login'), admin_url)
        self.assertEquals(response.url, expected_url)
        module_name = 'admin_ip_whitelist.middleware'
        l.check(
            (module_name, "DEBUG", "[django-admin-ip-whitelist] status = enabled"),
            (module_name, "DEBUG", "GOT IP FROM Request: 5.5.5.5 and User Agent None"),
            (module_name, "DEBUG", "GOT IP FROM Request: 1.2.3.4 and User Agent None"),
            (module_name, "DEBUG", "/Admin access IP: DJANGO_ADMIN_ACCESS_WHITELIST:1.2.3.4"),
        )
class ModelTests(TestCase):
    """Cache bookkeeping tests for the DjangoAdminAccessIPWhitelist model.

    Creating, re-saving, and deleting an entry must keep exactly one cache
    key (prefix + IP) in sync with the database row.
    """

    def tearDown(self):
        cache.clear()

    def _add_entry(self, reason, address):
        # Shared factory for a whitelist row; saving it is expected to
        # mirror the entry into the cache.
        return DjangoAdminAccessIPWhitelist.objects.create(
            whitelist_reason=reason,
            ip=address,
        )

    def test_instance_create_and_update(self):
        # The cache starts empty and the key is absent.
        self.assertEquals(len(cache._cache.keys()), 0)
        old_key = ADMIN_ACCESS_WHITELIST_PREFIX + '1.2.3.4'
        self.assertEquals(cache.get(old_key), None)
        # Creating the entry materialises exactly one cache key.
        entry = self._add_entry('You are special', '1.2.3.4')
        self.assertEquals(len(cache._cache.keys()), 1)
        self.assertEquals(cache.get(old_key), '1')
        # Re-pointing the entry to a new IP moves the key; the old one goes.
        entry.ip = '5.5.5.5'
        entry.save()
        self.assertEquals(cache.get(old_key), None)
        fresh_key = ADMIN_ACCESS_WHITELIST_PREFIX + '5.5.5.5'
        self.assertEquals(cache.get(fresh_key), '1')
        self.assertEquals(len(cache._cache.keys()), 1)

    def test_instance_delete(self):
        self.assertEquals(len(cache._cache.keys()), 0)
        entry = self._add_entry('You are special', '1.2.3.4')
        self.assertEquals(len(cache._cache.keys()), 1)
        cached_at = ADMIN_ACCESS_WHITELIST_PREFIX + '1.2.3.4'
        self.assertEquals(cache.get(cached_at), '1')
        # Deleting the row must evict its cache key as well.
        entry.delete()
        self.assertEquals(cache.get(cached_at), None)

    def test_unicode(self):
        # Non-ASCII reasons must survive the unicode() representation.
        entry = self._add_entry(
            u"This is what a cat looks like: \U0001F408", '1.2.3.4')
        self.assertEquals(
            unicode(entry),
            u"Whitelisted 1.2.3.4 (This is what a cat looks like: \U0001F408)"
        )

    def test_str(self):
        # Python 2 str() yields the UTF-8 byte sequence for the emoji.
        entry = self._add_entry(
            u"This is what a cat looks like: \U0001F408", '1.2.3.4')
        self.assertEquals(
            str(entry),
            "Whitelisted 1.2.3.4 (This is what a cat looks like: \xF0\x9F\x90\x88)"
        )
| 40.462428
| 94
| 0.630714
| 899
| 7,000
| 4.746385
| 0.150167
| 0.01828
| 0.01828
| 0.017811
| 0.810405
| 0.810405
| 0.79775
| 0.768924
| 0.720881
| 0.711976
| 0
| 0.036056
| 0.239286
| 7,000
| 172
| 95
| 40.697674
| 0.765258
| 0.032714
| 0
| 0.652174
| 0
| 0.014493
| 0.225477
| 0.049564
| 0
| 0
| 0
| 0
| 0.217391
| 1
| 0.07971
| false
| 0
| 0.036232
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7260abb264d0709fcccc65f7bd08f381a5b9847d
| 89
|
py
|
Python
|
zhanglyLabTools/__init__.py
|
CuteBeaeast/zhanglyLabTools
|
2a3cb17bd97a831518d5d989265758ee0f880732
|
[
"MIT"
] | 1
|
2021-03-16T06:12:24.000Z
|
2021-03-16T06:12:24.000Z
|
zhanglyLabTools/__init__.py
|
CuteBeaeast/zhanglyLabTools
|
2a3cb17bd97a831518d5d989265758ee0f880732
|
[
"MIT"
] | null | null | null |
zhanglyLabTools/__init__.py
|
CuteBeaeast/zhanglyLabTools
|
2a3cb17bd97a831518d5d989265758ee0f880732
|
[
"MIT"
] | null | null | null |
from .script_generator import script_generator
from .code_generator import code_generator
| 44.5
| 46
| 0.898876
| 12
| 89
| 6.333333
| 0.416667
| 0.394737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078652
| 89
| 2
| 47
| 44.5
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
72841a52ffaef28e94fa6ad27200721a66fe5ce9
| 19,425
|
py
|
Python
|
kopf/on.py
|
brainbreaker/kopf
|
a2c2ffbc1c52c70553b2b374b9395ed97a64fa2a
|
[
"MIT"
] | null | null | null |
kopf/on.py
|
brainbreaker/kopf
|
a2c2ffbc1c52c70553b2b374b9395ed97a64fa2a
|
[
"MIT"
] | null | null | null |
kopf/on.py
|
brainbreaker/kopf
|
a2c2ffbc1c52c70553b2b374b9395ed97a64fa2a
|
[
"MIT"
] | null | null | null |
"""
The decorators for the event handlers. Usually used as::
import kopf
@kopf.on.create('zalando.org', 'v1', 'kopfexamples')
def creation_handler(**kwargs):
pass
This module is a part of the framework's public interface.
"""
# TODO: add cluster=True support (different API methods)
import warnings
from typing import Optional, Callable
from kopf.reactor import causation
from kopf.reactor import errors as errors_
from kopf.reactor import handlers
from kopf.reactor import handling
from kopf.reactor import registries
from kopf.structs import callbacks
from kopf.structs import dicts
from kopf.structs import filters
from kopf.structs import resources
# Signatures of the decorators produced by the factories below: each one
# accepts a callback of the given kind and returns it unchanged.
ActivityDecorator = Callable[[callbacks.ActivityFn], callbacks.ActivityFn]
ResourceWatchingDecorator = Callable[[callbacks.ResourceWatchingFn], callbacks.ResourceWatchingFn]
ResourceChangingDecorator = Callable[[callbacks.ResourceChangingFn], callbacks.ResourceChangingFn]
def startup(  # lgtm[py/similar-function]
        *,
        id: Optional[str] = None,
        errors: Optional[errors_.ErrorsMode] = None,
        timeout: Optional[float] = None,
        retries: Optional[int] = None,
        backoff: Optional[float] = None,
        cooldown: Optional[float] = None,  # deprecated, use `backoff`
        registry: Optional[registries.OperatorRegistry] = None,
) -> ActivityDecorator:
    """ ``@kopf.on.startup()`` handler for the operator startup activity. """
    def decorator(fn: callbacks.ActivityFn) -> callbacks.ActivityFn:
        # Fall back to the process-wide default registry when none is given.
        target_registry = registries.get_default_registry() if registry is None else registry
        handler = handlers.ActivityHandler(
            fn=fn, id=registries.generate_id(fn=fn, id=id),
            errors=errors, timeout=timeout, retries=retries, backoff=backoff, cooldown=cooldown,
            activity=causation.Activity.STARTUP,
        )
        target_registry.activity_handlers.append(handler)
        return fn
    return decorator
def cleanup(  # lgtm[py/similar-function]
        *,
        id: Optional[str] = None,
        errors: Optional[errors_.ErrorsMode] = None,
        timeout: Optional[float] = None,
        retries: Optional[int] = None,
        backoff: Optional[float] = None,
        cooldown: Optional[float] = None,  # deprecated, use `backoff`
        registry: Optional[registries.OperatorRegistry] = None,
) -> ActivityDecorator:
    """ ``@kopf.on.cleanup()`` handler for the operator cleanup activity. """
    def decorator(fn: callbacks.ActivityFn) -> callbacks.ActivityFn:
        # Fall back to the process-wide default registry when none is given.
        target_registry = registries.get_default_registry() if registry is None else registry
        handler = handlers.ActivityHandler(
            fn=fn, id=registries.generate_id(fn=fn, id=id),
            errors=errors, timeout=timeout, retries=retries, backoff=backoff, cooldown=cooldown,
            activity=causation.Activity.CLEANUP,
        )
        target_registry.activity_handlers.append(handler)
        return fn
    return decorator
def login(  # lgtm[py/similar-function]
        *,
        id: Optional[str] = None,
        errors: Optional[errors_.ErrorsMode] = None,
        timeout: Optional[float] = None,
        retries: Optional[int] = None,
        backoff: Optional[float] = None,
        cooldown: Optional[float] = None,  # deprecated, use `backoff`
        registry: Optional[registries.OperatorRegistry] = None,
) -> ActivityDecorator:
    """ ``@kopf.on.login()`` handler for custom (re-)authentication. """
    def decorator(fn: callbacks.ActivityFn) -> callbacks.ActivityFn:
        # Fall back to the process-wide default registry when none is given.
        target_registry = registries.get_default_registry() if registry is None else registry
        handler = handlers.ActivityHandler(
            fn=fn, id=registries.generate_id(fn=fn, id=id),
            errors=errors, timeout=timeout, retries=retries, backoff=backoff, cooldown=cooldown,
            activity=causation.Activity.AUTHENTICATION,
        )
        target_registry.activity_handlers.append(handler)
        return fn
    return decorator
def probe(  # lgtm[py/similar-function]
        *,
        id: Optional[str] = None,
        errors: Optional[errors_.ErrorsMode] = None,
        timeout: Optional[float] = None,
        retries: Optional[int] = None,
        backoff: Optional[float] = None,
        cooldown: Optional[float] = None,  # deprecated, use `backoff`
        registry: Optional[registries.OperatorRegistry] = None,
) -> ActivityDecorator:
    """ ``@kopf.on.probe()`` handler for arbitrary liveness metrics. """
    def decorator(fn: callbacks.ActivityFn) -> callbacks.ActivityFn:
        # Fall back to the process-wide default registry when none is given.
        target_registry = registries.get_default_registry() if registry is None else registry
        handler = handlers.ActivityHandler(
            fn=fn, id=registries.generate_id(fn=fn, id=id),
            errors=errors, timeout=timeout, retries=retries, backoff=backoff, cooldown=cooldown,
            activity=causation.Activity.PROBE,
        )
        target_registry.activity_handlers.append(handler)
        return fn
    return decorator
def resume(  # lgtm[py/similar-function]
        group: str, version: str, plural: str,
        *,
        id: Optional[str] = None,
        errors: Optional[errors_.ErrorsMode] = None,
        timeout: Optional[float] = None,
        retries: Optional[int] = None,
        backoff: Optional[float] = None,
        cooldown: Optional[float] = None,  # deprecated, use `backoff`
        registry: Optional[registries.OperatorRegistry] = None,
        deleted: Optional[bool] = None,
        labels: Optional[filters.MetaFilter] = None,
        annotations: Optional[filters.MetaFilter] = None,
        when: Optional[callbacks.WhenFilterFn] = None,
) -> ResourceChangingDecorator:
    """ ``@kopf.on.resume()`` handler for the object resuming on operator (re)start. """
    def decorator(fn: callbacks.ResourceChangingFn) -> callbacks.ResourceChangingFn:
        _warn_deprecated_filters(labels, annotations)
        real_registry = registry if registry is not None else registries.get_default_registry()
        real_resource = resources.Resource(group, version, plural)
        real_id = registries.generate_id(fn=fn, id=id)
        handler = handlers.ResourceChangingHandler(
            fn=fn, id=real_id, field=None,
            errors=errors, timeout=timeout, retries=retries, backoff=backoff, cooldown=cooldown,
            labels=labels, annotations=annotations, when=when,
            # Unlike the cause-specific decorators, resume sets initial=True
            # and leaves reason unset (no single create/update/delete cause).
            initial=True, deleted=deleted, requires_finalizer=None,
            reason=None,
        )
        real_registry.resource_changing_handlers[real_resource].append(handler)
        return fn
    return decorator
def create(  # lgtm[py/similar-function]
        group: str, version: str, plural: str,
        *,
        id: Optional[str] = None,
        errors: Optional[errors_.ErrorsMode] = None,
        timeout: Optional[float] = None,
        retries: Optional[int] = None,
        backoff: Optional[float] = None,
        cooldown: Optional[float] = None,  # deprecated; use backoff.
        registry: Optional[registries.OperatorRegistry] = None,
        labels: Optional[filters.MetaFilter] = None,
        annotations: Optional[filters.MetaFilter] = None,
        when: Optional[callbacks.WhenFilterFn] = None,
) -> ResourceChangingDecorator:
    """ ``@kopf.on.create()`` handler for the object creation. """
    def decorator(fn: callbacks.ResourceChangingFn) -> callbacks.ResourceChangingFn:
        # Warn about deprecated `None` filter values before registering.
        _warn_deprecated_filters(labels, annotations)
        real_registry = registry if registry is not None else registries.get_default_registry()
        real_resource = resources.Resource(group, version, plural)
        real_id = registries.generate_id(fn=fn, id=id)
        handler = handlers.ResourceChangingHandler(
            fn=fn, id=real_id, field=None,
            errors=errors, timeout=timeout, retries=retries, backoff=backoff, cooldown=cooldown,
            labels=labels, annotations=annotations, when=when,
            initial=None, deleted=None, requires_finalizer=None,
            reason=causation.Reason.CREATE,  # restricts the handler to creation causes
        )
        real_registry.resource_changing_handlers[real_resource].append(handler)
        return fn
    return decorator
def update(  # lgtm[py/similar-function]
        group: str, version: str, plural: str,
        *,
        id: Optional[str] = None,
        errors: Optional[errors_.ErrorsMode] = None,
        timeout: Optional[float] = None,
        retries: Optional[int] = None,
        backoff: Optional[float] = None,
        cooldown: Optional[float] = None,  # deprecated, use `backoff`
        registry: Optional[registries.OperatorRegistry] = None,
        labels: Optional[filters.MetaFilter] = None,
        annotations: Optional[filters.MetaFilter] = None,
        when: Optional[callbacks.WhenFilterFn] = None,
) -> ResourceChangingDecorator:
    """ ``@kopf.on.update()`` handler for the object update or change. """
    def decorator(fn: callbacks.ResourceChangingFn) -> callbacks.ResourceChangingFn:
        # Warn about deprecated `None` filter values before registering.
        _warn_deprecated_filters(labels, annotations)
        real_registry = registry if registry is not None else registries.get_default_registry()
        real_resource = resources.Resource(group, version, plural)
        real_id = registries.generate_id(fn=fn, id=id)
        handler = handlers.ResourceChangingHandler(
            fn=fn, id=real_id, field=None,
            errors=errors, timeout=timeout, retries=retries, backoff=backoff, cooldown=cooldown,
            labels=labels, annotations=annotations, when=when,
            initial=None, deleted=None, requires_finalizer=None,
            reason=causation.Reason.UPDATE,  # restricts the handler to update causes
        )
        real_registry.resource_changing_handlers[real_resource].append(handler)
        return fn
    return decorator
def delete(  # lgtm[py/similar-function]
        group: str, version: str, plural: str,
        *,
        id: Optional[str] = None,
        errors: Optional[errors_.ErrorsMode] = None,
        timeout: Optional[float] = None,
        retries: Optional[int] = None,
        backoff: Optional[float] = None,
        cooldown: Optional[float] = None,  # deprecated, use `backoff`
        registry: Optional[registries.OperatorRegistry] = None,
        optional: Optional[bool] = None,
        labels: Optional[filters.MetaFilter] = None,
        annotations: Optional[filters.MetaFilter] = None,
        when: Optional[callbacks.WhenFilterFn] = None,
) -> ResourceChangingDecorator:
    """ ``@kopf.on.delete()`` handler for the object deletion. """
    def decorator(fn: callbacks.ResourceChangingFn) -> callbacks.ResourceChangingFn:
        # Warn about deprecated `None` filter values before registering.
        _warn_deprecated_filters(labels, annotations)
        real_registry = registry if registry is not None else registries.get_default_registry()
        real_resource = resources.Resource(group, version, plural)
        real_id = registries.generate_id(fn=fn, id=id)
        handler = handlers.ResourceChangingHandler(
            fn=fn, id=real_id, field=None,
            errors=errors, timeout=timeout, retries=retries, backoff=backoff, cooldown=cooldown,
            labels=labels, annotations=annotations, when=when,
            # A finalizer is required unless the handler is marked optional
            # (note: bool(not optional) maps None -> True as well).
            initial=None, deleted=None, requires_finalizer=bool(not optional),
            reason=causation.Reason.DELETE,
        )
        real_registry.resource_changing_handlers[real_resource].append(handler)
        return fn
    return decorator
def field(  # lgtm[py/similar-function]
        group: str, version: str, plural: str,
        field: dicts.FieldSpec,
        *,
        id: Optional[str] = None,
        errors: Optional[errors_.ErrorsMode] = None,
        timeout: Optional[float] = None,
        retries: Optional[int] = None,
        backoff: Optional[float] = None,
        cooldown: Optional[float] = None,  # deprecated, use `backoff`
        registry: Optional[registries.OperatorRegistry] = None,
        labels: Optional[filters.MetaFilter] = None,
        annotations: Optional[filters.MetaFilter] = None,
        when: Optional[callbacks.WhenFilterFn] = None,
) -> ResourceChangingDecorator:
    """ ``@kopf.on.field()`` handler for the individual field changes. """
    def decorator(fn: callbacks.ResourceChangingFn) -> callbacks.ResourceChangingFn:
        # Warn about deprecated `None` filter values before registering.
        _warn_deprecated_filters(labels, annotations)
        real_registry = registry if registry is not None else registries.get_default_registry()
        real_resource = resources.Resource(group, version, plural)
        real_field = dicts.parse_field(field) or None  # to not store tuple() as a no-field case.
        # The handler id gets the dotted field path appended as a suffix.
        real_id = registries.generate_id(fn=fn, id=id, suffix=".".join(real_field or []))
        handler = handlers.ResourceChangingHandler(
            fn=fn, id=real_id, field=real_field,
            errors=errors, timeout=timeout, retries=retries, backoff=backoff, cooldown=cooldown,
            labels=labels, annotations=annotations, when=when,
            initial=None, deleted=None, requires_finalizer=None,
            reason=None,
        )
        real_registry.resource_changing_handlers[real_resource].append(handler)
        return fn
    return decorator
def event(  # lgtm[py/similar-function]
        group: str, version: str, plural: str,
        *,
        id: Optional[str] = None,
        registry: Optional[registries.OperatorRegistry] = None,
        labels: Optional[filters.MetaFilter] = None,
        annotations: Optional[filters.MetaFilter] = None,
        when: Optional[callbacks.WhenFilterFn] = None,
) -> ResourceWatchingDecorator:
    """ ``@kopf.on.event()`` handler for the silent spies on the events. """
    def decorator(fn: callbacks.ResourceWatchingFn) -> callbacks.ResourceWatchingFn:
        _warn_deprecated_filters(labels, annotations)
        # Fall back to the process-wide default registry when none is given.
        target_registry = registries.get_default_registry() if registry is None else registry
        watched_resource = resources.Resource(group, version, plural)
        handler = handlers.ResourceWatchingHandler(
            fn=fn, id=registries.generate_id(fn=fn, id=id),
            # Watching handlers expose no error-handling knobs: all unset.
            errors=None, timeout=None, retries=None, backoff=None, cooldown=None,
            labels=labels, annotations=annotations, when=when,
        )
        target_registry.resource_watching_handlers[watched_resource].append(handler)
        return fn
    return decorator
# TODO: find a better name: `@kopf.on.this` is confusing and does not fully
# TODO: match with the `@kopf.on.{cause}` pattern, where cause is create/update/delete.
def this(  # lgtm[py/similar-function]
        *,
        id: Optional[str] = None,
        errors: Optional[errors_.ErrorsMode] = None,
        timeout: Optional[float] = None,
        retries: Optional[int] = None,
        backoff: Optional[float] = None,
        cooldown: Optional[float] = None,  # deprecated, use `backoff`
        registry: Optional[registries.ResourceChangingRegistry] = None,
        labels: Optional[filters.MetaFilter] = None,
        annotations: Optional[filters.MetaFilter] = None,
        when: Optional[callbacks.WhenFilterFn] = None,
) -> ResourceChangingDecorator:
    """
    ``@kopf.on.this()`` decorator for the dynamically generated sub-handlers.
    Can be used only inside of the handler function.
    It is efficiently a syntax sugar to look like all other handlers::
        @kopf.on.create('zalando.org', 'v1', 'kopfexamples')
        def create(*, spec, **kwargs):
            for task in spec.get('tasks', []):
                @kopf.on.this(id=f'task_{task}')
                def create_task(*, spec, task=task, **kwargs):
                    pass
    In this example, having spec.tasks set to ``[abc, def]``, this will create
    the following handlers: ``create``, ``create/task_abc``, ``create/task_def``.
    The parent handler is not considered as finished if there are unfinished
    sub-handlers left. Since the sub-handlers will be executed in the regular
    reactor and lifecycle, with multiple low-level events (one per iteration),
    the parent handler will also be executed multiple times, and is expected
    to produce the same (or at least predictable) set of sub-handlers.
    In addition, keep its logic idempotent (not failing on the repeated calls).
    Note: ``task=task`` is needed to freeze the closure variable, so that every
    create function will have its own value, not the latest in the for-cycle.
    """
    def decorator(fn: callbacks.ResourceChangingFn) -> callbacks.ResourceChangingFn:
        _warn_deprecated_filters(labels, annotations)
        # The currently executing parent handler (context variable), if any,
        # prefixes the sub-handler's id (e.g. "create/task_abc").
        parent_handler = handling.handler_var.get()
        # By default, sub-handlers go into the per-execution sub-registry.
        real_registry = registry if registry is not None else handling.subregistry_var.get()
        real_id = registries.generate_id(fn=fn, id=id,
                                         prefix=parent_handler.id if parent_handler else None)
        handler = handlers.ResourceChangingHandler(
            fn=fn, id=real_id, field=None,
            errors=errors, timeout=timeout, retries=retries, backoff=backoff, cooldown=cooldown,
            labels=labels, annotations=annotations, when=when,
            initial=None, deleted=None, requires_finalizer=None,
            reason=None,
        )
        real_registry.append(handler)
        return fn
    return decorator
def register(  # lgtm[py/similar-function]
        fn: callbacks.ResourceChangingFn,
        *,
        id: Optional[str] = None,
        errors: Optional[errors_.ErrorsMode] = None,
        timeout: Optional[float] = None,
        retries: Optional[int] = None,
        backoff: Optional[float] = None,
        cooldown: Optional[float] = None,  # deprecated, use `backoff`
        registry: Optional[registries.ResourceChangingRegistry] = None,
        labels: Optional[filters.MetaFilter] = None,
        annotations: Optional[filters.MetaFilter] = None,
        when: Optional[callbacks.WhenFilterFn] = None,
) -> callbacks.ResourceChangingFn:
    """
    Register a function as a sub-handler of the currently executed handler.

    This is an imperative equivalent of the ``@kopf.on.this`` decorator::

        @kopf.on.create('zalando.org', 'v1', 'kopfexamples')
        def create_it(spec, **kwargs):
            for task in spec.get('tasks', []):
                def create_single_task(task=task, **_):
                    pass
                kopf.register(id=task, fn=create_single_task)

    The same effect as::

        @kopf.on.create('zalando.org', 'v1', 'kopfexamples')
        def create_it(spec, **kwargs):
            for task in spec.get('tasks', []):
                @kopf.on.this(id=task)
                def create_single_task(task=task, **_):
                    pass
    """
    # Delegate all the configuration to the decorator factory, and apply it.
    return this(
        id=id, registry=registry,
        errors=errors, timeout=timeout, retries=retries, backoff=backoff, cooldown=cooldown,
        labels=labels, annotations=annotations, when=when,
    )(fn)
def _warn_deprecated_filters(
        labels: Optional[filters.MetaFilter],
        annotations: Optional[filters.MetaFilter],
) -> None:
    """
    Emit a ``DeprecationWarning`` for every ``None`` label/annotation filter.

    ``None`` as a filter value is deprecated in favour of ``kopf.PRESENT``.
    One warning is emitted per offending value; ``stacklevel=2`` points the
    warning at the decorator call site rather than at this helper.
    """
    # Only the values matter here; the keys are irrelevant (was: .items()
    # with an unused key). The messages were also needlessly f-strings.
    if labels is not None:
        for val in labels.values():
            if val is None:
                warnings.warn(
                    "`None` for label filters is deprecated; use kopf.PRESENT.",
                    DeprecationWarning, stacklevel=2)
    if annotations is not None:
        for val in annotations.values():
            if val is None:
                warnings.warn(
                    "`None` for annotation filters is deprecated; use kopf.PRESENT.",
                    DeprecationWarning, stacklevel=2)
| 44.147727
| 98
| 0.662239
| 2,098
| 19,425
| 6.051001
| 0.118208
| 0.033793
| 0.044191
| 0.038834
| 0.772273
| 0.769122
| 0.764395
| 0.753289
| 0.750217
| 0.719417
| 0
| 0.000404
| 0.234801
| 19,425
| 439
| 99
| 44.248292
| 0.853673
| 0.17704
| 0
| 0.766467
| 0
| 0
| 0.007621
| 0
| 0
| 0
| 0
| 0.002278
| 0
| 1
| 0.071856
| false
| 0
| 0.032934
| 0
| 0.173653
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
729a7dcc9af2af3f9385db019d3e8c7fdebda86c
| 41
|
py
|
Python
|
build/lib/PyQuantum/TCL/DensityMatrix.py
|
alexfmsu/pyquantum
|
78b09987cbfecf549e67b919bb5cb2046b21ad44
|
[
"MIT"
] | null | null | null |
build/lib/PyQuantum/TCL/DensityMatrix.py
|
alexfmsu/pyquantum
|
78b09987cbfecf549e67b919bb5cb2046b21ad44
|
[
"MIT"
] | null | null | null |
build/lib/PyQuantum/TCL/DensityMatrix.py
|
alexfmsu/pyquantum
|
78b09987cbfecf549e67b919bb5cb2046b21ad44
|
[
"MIT"
] | 2
|
2020-07-28T08:40:06.000Z
|
2022-02-16T23:04:58.000Z
|
from PyQuantum.TC.DensityMatrix import *
| 20.5
| 40
| 0.829268
| 5
| 41
| 6.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 41
| 1
| 41
| 41
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
72a998c8c381658d681d295359ca8640e61ed60f
| 26,208
|
py
|
Python
|
jupyterlab_git/tests/test_handlers.py
|
DarkmatterVale/jupyterlab-git
|
77f4e4bf5bec75a93471c387bb402a46fba83a39
|
[
"BSD-3-Clause"
] | null | null | null |
jupyterlab_git/tests/test_handlers.py
|
DarkmatterVale/jupyterlab-git
|
77f4e4bf5bec75a93471c387bb402a46fba83a39
|
[
"BSD-3-Clause"
] | 1
|
2021-07-02T06:05:18.000Z
|
2021-07-19T14:47:13.000Z
|
jupyterlab_git/tests/test_handlers.py
|
sarahspak/jupyterlab-git
|
c5e09cbf8690821cb842ec021e11f213fc9d54da
|
[
"BSD-3-Clause"
] | null | null | null |
import json
from unittest.mock import ANY, MagicMock, Mock, call, patch
import pytest
import tornado
from jupyterlab_git.git import Git
from jupyterlab_git.handlers import NAMESPACE, setup_handlers, GitHandler
from .testutils import assert_http_error, maybe_future
def test_mapping_added():
    """setup_handlers must register its URL patterns on the Tornado web app."""
    web_app = Mock()
    web_app.settings = {"base_url": "nb_base_url"}
    setup_handlers(web_app)
    # Handlers are registered for every host (".*"); the pattern list itself
    # is not inspected here.
    web_app.add_handlers.assert_called_once_with(".*", ANY)
@pytest.mark.parametrize(
    "path, with_cm", (("url", False), ("url/to/path", False), ("url/to/path", True))
)
def test_GitHandler_url2localpath(path, with_cm, jp_web_app, jp_root_dir):
    """url2localpath maps a URL path under the server root to a local path,
    optionally paired with the handler's contents manager."""
    req = tornado.httputil.HTTPServerRequest()
    req.connection = MagicMock()
    handler = GitHandler(jp_web_app, req)
    if with_cm:
        # With with_cm=True, a (path, contents_manager) pair is returned.
        assert (
            str(jp_root_dir / path),
            handler.contents_manager,
        ) == handler.url2localpath(path, with_cm)
    else:
        assert str(jp_root_dir / path) == handler.url2localpath(path, with_cm)
@patch("jupyterlab_git.handlers.GitAllHistoryHandler.git", spec=Git)
async def test_all_history_handler_localbranch(mock_git, jp_fetch, jp_root_dir):
    """POST /all_history aggregates show_top_level, branch, log and status
    into one payload, forwarding history_count to Git.log."""
    # Given
    show_top_level = {"code": 0, "path": "foo"}
    branch = "branch_foo"
    log = "log_foo"
    status = "status_foo"
    local_path = jp_root_dir / "test_path"
    mock_git.show_top_level.return_value = maybe_future(show_top_level)
    mock_git.branch.return_value = maybe_future(branch)
    mock_git.log.return_value = maybe_future(log)
    mock_git.status.return_value = maybe_future(status)
    # When
    body = {"history_count": 25}
    response = await jp_fetch(
        NAMESPACE, local_path.name, "all_history", body=json.dumps(body), method="POST"
    )
    # Then
    mock_git.show_top_level.assert_called_with(str(local_path))
    mock_git.branch.assert_called_with(str(local_path))
    mock_git.log.assert_called_with(str(local_path), 25)
    mock_git.status.assert_called_with(str(local_path))
    assert response.code == 200
    payload = json.loads(response.body)
    # The top-level "code" mirrors show_top_level's code.
    assert payload == {
        "code": show_top_level["code"],
        "data": {
            "show_top_level": show_top_level,
            "branch": branch,
            "log": log,
            "status": status,
        },
    }
@patch("jupyterlab_git.git.execute")
async def test_git_show_prefix(mock_git_execute, jp_fetch, jp_root_dir):
    """POST /show_prefix returns the path reported by
    `git rev-parse --show-prefix`, run in the requested subfolder."""
    # Given
    path = "path/to/repo"
    local_path = jp_root_dir / "test_path"
    mock_git_execute.return_value = maybe_future((0, str(path), ""))
    # When
    response = await jp_fetch(
        NAMESPACE,
        local_path.name + "/subfolder",
        "show_prefix",
        body="{}",
        method="POST",
    )
    # Then
    assert response.code == 200
    payload = json.loads(response.body)
    assert payload["path"] == str(path)
    mock_git_execute.assert_has_calls(
        [
            call(
                ["git", "rev-parse", "--show-prefix"],
                cwd=str(local_path / "subfolder"),
            ),
        ]
    )
@patch("jupyterlab_git.git.execute")
async def test_git_show_prefix_not_a_git_repo(mock_git_execute, jp_fetch, jp_root_dir):
    """Outside a git repository (exit code 128), /show_prefix still answers
    200 but with a null path instead of an error."""
    # Given
    local_path = jp_root_dir / "test_path"
    # git exits 128 with this message when there is no repository.
    mock_git_execute.return_value = maybe_future(
        (128, "", "fatal: not a git repository (or any")
    )
    # When
    response = await jp_fetch(
        NAMESPACE,
        local_path.name + "/subfolder",
        "show_prefix",
        body="{}",
        method="POST",
    )
    # Then
    assert response.code == 200
    payload = json.loads(response.body)
    assert payload["path"] is None
    mock_git_execute.assert_has_calls(
        [
            call(
                ["git", "rev-parse", "--show-prefix"],
                cwd=str(local_path / "subfolder"),
            ),
        ]
    )
@patch("jupyterlab_git.git.execute")
async def test_git_show_top_level(mock_git_execute, jp_fetch, jp_root_dir):
    """POST /show_top_level returns the path reported by
    `git rev-parse --show-toplevel`, run in the requested subfolder."""
    # Given
    path = "path/to/repo"
    local_path = jp_root_dir / "test_path"
    mock_git_execute.return_value = maybe_future((0, str(path), ""))
    # When
    response = await jp_fetch(
        NAMESPACE,
        local_path.name + "/subfolder",
        "show_top_level",
        body="{}",
        method="POST",
    )
    # Then
    assert response.code == 200
    payload = json.loads(response.body)
    assert payload["path"] == str(path)
    mock_git_execute.assert_has_calls(
        [
            call(
                ["git", "rev-parse", "--show-toplevel"],
                cwd=str(local_path / "subfolder"),
            ),
        ]
    )
@patch("jupyterlab_git.git.execute")
async def test_git_show_top_level_not_a_git_repo(mock_git_execute, jp_fetch, jp_root_dir):
    """Outside a git repository (exit code 128), /show_top_level still answers
    200 but with a null path instead of an error."""
    # Given
    local_path = jp_root_dir / "test_path"
    # git exits 128 with this message when there is no repository.
    mock_git_execute.return_value = maybe_future(
        (128, "", "fatal: not a git repository (or any")
    )
    # When
    response = await jp_fetch(
        NAMESPACE,
        local_path.name + "/subfolder",
        "show_top_level",
        body="{}",
        method="POST",
    )
    # Then
    assert response.code == 200
    payload = json.loads(response.body)
    assert payload["path"] is None
    mock_git_execute.assert_has_calls(
        [
            call(
                ["git", "rev-parse", "--show-toplevel"],
                cwd=str(local_path / "subfolder"),
            ),
        ]
    )
@patch("jupyterlab_git.handlers.GitBranchHandler.git", spec=Git)
async def test_branch_handler_localbranch(mock_git, jp_fetch, jp_root_dir):
    """POST /branch passes the branch list produced by Git.branch through to
    the response payload unchanged (local and remote branches alike)."""
    # Given
    local_path = jp_root_dir / "test_path"
    branch = {
        "code": 0,
        "branches": [
            {
                "is_current_branch": True,
                "is_remote_branch": False,
                "name": "feature-foo",
                "upstream": "origin/feature-foo",
                "top_commit": "abcdefghijklmnopqrstuvwxyz01234567890123",
                "tag": None,
            },
            {
                "is_current_branch": False,
                "is_remote_branch": False,
                "name": "master",
                "upstream": "origin/master",
                "top_commit": "abcdefghijklmnopqrstuvwxyz01234567890123",
                "tag": None,
            },
            {
                "is_current_branch": False,
                "is_remote_branch": False,
                "name": "feature-bar",
                "upstream": None,
                "top_commit": "01234567899999abcdefghijklmnopqrstuvwxyz",
                "tag": None,
            },
            {
                "is_current_branch": False,
                "is_remote_branch": True,
                "name": "origin/feature-foo",
                "upstream": None,
                "top_commit": "abcdefghijklmnopqrstuvwxyz01234567890123",
                "tag": None,
            },
            {
                "is_current_branch": False,
                "is_remote_branch": True,
                "name": "origin/master",
                "upstream": None,
                "top_commit": "abcdefghijklmnopqrstuvwxyz01234567890123",
                "tag": None,
            },
        ],
    }
    mock_git.branch.return_value = maybe_future(branch)
    # When
    response = await jp_fetch(
        NAMESPACE, local_path.name, "branch", body="{}", method="POST"
    )
    # Then
    mock_git.branch.assert_called_with(str(local_path))
    assert response.code == 200
    payload = json.loads(response.body)
    assert payload == {"code": 0, "branches": branch["branches"]}
@patch("jupyterlab_git.handlers.GitLogHandler.git", spec=Git)
async def test_log_handler(mock_git, jp_fetch, jp_root_dir):
    """POST /log forwards the request's history_count to Git.log and relays
    the result as the response payload."""
    # Given
    local_path = jp_root_dir / "test_path"
    log = {"code": 0, "commits": []}
    mock_git.log.return_value = maybe_future(log)
    # When
    body = {"history_count": 20}
    response = await jp_fetch(
        NAMESPACE, local_path.name, "log", body=json.dumps(body), method="POST"
    )
    # Then
    mock_git.log.assert_called_with(str(local_path), 20)
    assert response.code == 200
    payload = json.loads(response.body)
    assert payload == log
@patch("jupyterlab_git.handlers.GitLogHandler.git", spec=Git)
async def test_log_handler_no_history_count(mock_git, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
log = {"code": 0, "commits": []}
mock_git.log.return_value = maybe_future(log)
# When
response = await jp_fetch(
NAMESPACE, local_path.name, "log", body="{}", method="POST"
)
# Then
mock_git.log.assert_called_with(str(local_path), 25)
assert response.code == 200
payload = json.loads(response.body)
assert payload == log
@patch("jupyterlab_git.handlers.GitPushHandler.git", spec=Git)
async def test_push_handler_localbranch(mock_git, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
mock_git.get_current_branch.return_value = maybe_future("localbranch")
mock_git.get_upstream_branch.return_value = maybe_future(
{"code": 0, "remote_short_name": ".", "remote_branch": "localbranch"}
)
mock_git.push.return_value = maybe_future({"code": 0})
# When
response = await jp_fetch(
NAMESPACE, local_path.name, "push", body="{}", method="POST"
)
# Then
mock_git.get_current_branch.assert_called_with(str(local_path))
mock_git.get_upstream_branch.assert_called_with(str(local_path), "localbranch")
mock_git.push.assert_called_with(
".", "HEAD:localbranch", str(local_path), None, False
)
assert response.code == 200
payload = json.loads(response.body)
assert payload == {"code": 0}
@patch("jupyterlab_git.handlers.GitPushHandler.git", spec=Git)
async def test_push_handler_remotebranch(mock_git, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
mock_git.get_current_branch.return_value = maybe_future("foo/bar")
upstream = {
"code": 0,
"remote_short_name": "origin/something",
"remote_branch": "remote-branch-name",
}
mock_git.get_upstream_branch.return_value = maybe_future(upstream)
mock_git.push.return_value = maybe_future({"code": 0})
# When
response = await jp_fetch(
NAMESPACE, local_path.name, "push", body="{}", method="POST"
)
# Then
mock_git.get_current_branch.assert_called_with(str(local_path))
mock_git.get_upstream_branch.assert_called_with(str(local_path), "foo/bar")
mock_git.push.assert_called_with(
"origin/something", "HEAD:remote-branch-name", str(local_path), None, False
)
assert response.code == 200
payload = json.loads(response.body)
assert payload == {"code": 0}
@patch("jupyterlab_git.handlers.GitPushHandler.git", spec=Git)
async def test_push_handler_noupstream(mock_git, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
mock_git.get_current_branch.return_value = maybe_future("foo")
upstream = {
"code": 128,
"command": "",
"message": "fatal: no upstream configured for branch 'foo'",
}
mock_git.get_upstream_branch.return_value = maybe_future(upstream)
mock_git.config.return_value = maybe_future({"options": dict()})
mock_git.remote_show.return_value = maybe_future({})
mock_git.push.return_value = maybe_future({"code": 0})
# When
with pytest.raises(tornado.httpclient.HTTPClientError) as e:
await jp_fetch(NAMESPACE, local_path.name, "push", body="{}", method="POST")
response = e.value.response
# Then
mock_git.get_current_branch.assert_called_with(str(local_path))
mock_git.get_upstream_branch.assert_called_with(str(local_path), "foo")
mock_git.config.assert_called_with(str(local_path))
mock_git.remote_show.assert_called_with(str(local_path))
mock_git.push.assert_not_called()
assert response.code == 500
payload = json.loads(response.body)
assert payload == {
"code": 128,
"message": "fatal: The current branch foo has no upstream branch.",
"remotes": list(),
}
@patch("jupyterlab_git.handlers.GitPushHandler.git", spec=Git)
async def test_push_handler_multipleupstream(mock_git, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
remotes = ["origin", "upstream"]
mock_git.get_current_branch.return_value = maybe_future("foo")
upstream = {"code": -1, "message": "oups"}
mock_git.get_upstream_branch.return_value = maybe_future(upstream)
mock_git.config.return_value = maybe_future({"options": dict()})
mock_git.remote_show.return_value = maybe_future({"remotes": remotes})
mock_git.push.return_value = maybe_future({"code": 0})
# When
with pytest.raises(tornado.httpclient.HTTPClientError) as e:
await jp_fetch(NAMESPACE, local_path.name, "push", body="{}", method="POST")
response = e.value.response
# Then
mock_git.get_current_branch.assert_called_with(str(local_path))
mock_git.get_upstream_branch.assert_called_with(str(local_path), "foo")
mock_git.config.assert_called_with(str(local_path))
mock_git.remote_show.assert_called_with(str(local_path))
mock_git.push.assert_not_called()
assert response.code == 500
payload = json.loads(response.body)
assert payload == {
"code": 128,
"message": "fatal: The current branch foo has no upstream branch.",
"remotes": remotes,
}
@patch("jupyterlab_git.handlers.GitPushHandler.git", spec=Git)
async def test_push_handler_noupstream_unique_remote(mock_git, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
remote = "origin"
mock_git.get_current_branch.return_value = maybe_future("foo")
upstream = {"code": -1, "message": "oups"}
mock_git.get_upstream_branch.return_value = maybe_future(upstream)
mock_git.config.return_value = maybe_future({"options": dict()})
mock_git.remote_show.return_value = maybe_future({"remotes": [remote]})
mock_git.push.return_value = maybe_future({"code": 0})
# When
response = await jp_fetch(
NAMESPACE, local_path.name, "push", body="{}", method="POST"
)
# Then
mock_git.get_current_branch.assert_called_with(str(local_path))
mock_git.get_upstream_branch.assert_called_with(str(local_path), "foo")
mock_git.config.assert_called_with(str(local_path))
mock_git.remote_show.assert_called_with(str(local_path))
mock_git.push.assert_called_with(
remote, "foo", str(local_path), None, set_upstream=True
)
assert response.code == 200
payload = json.loads(response.body)
assert payload == {"code": 0}
@patch("jupyterlab_git.handlers.GitPushHandler.git", spec=Git)
async def test_push_handler_noupstream_pushdefault(mock_git, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
remote = "rorigin"
mock_git.get_current_branch.return_value = maybe_future("foo")
upstream = {"code": -1, "message": "oups"}
mock_git.get_upstream_branch.return_value = maybe_future(upstream)
mock_git.config.return_value = maybe_future(
{"options": {"remote.pushdefault": remote}}
)
mock_git.remote_show.return_value = maybe_future({"remotes": [remote, "upstream"]})
mock_git.push.return_value = maybe_future({"code": 0})
# When
response = await jp_fetch(
NAMESPACE, local_path.name, "push", body="{}", method="POST"
)
# Then
mock_git.get_current_branch.assert_called_with(str(local_path))
mock_git.get_upstream_branch.assert_called_with(str(local_path), "foo")
mock_git.config.assert_called_with(str(local_path))
mock_git.remote_show.assert_called_with(str(local_path))
mock_git.push.assert_called_with(
remote, "foo", str(local_path), None, set_upstream=True
)
assert response.code == 200
payload = json.loads(response.body)
assert payload == {"code": 0}
@patch("jupyterlab_git.handlers.GitPushHandler.git", spec=Git)
async def test_push_handler_noupstream_pass_remote_nobranch(
mock_git, jp_fetch, jp_root_dir
):
# Given
local_path = jp_root_dir / "test_path"
mock_git.get_current_branch.return_value = maybe_future("foo")
upstream = {"code": -1, "message": "oups"}
mock_git.get_upstream_branch.return_value = maybe_future(upstream)
mock_git.config.return_value = maybe_future({"options": dict()})
mock_git.remote_show.return_value = maybe_future({})
mock_git.push.return_value = maybe_future({"code": 0})
remote = "online"
# When
body = {"remote": remote}
response = await jp_fetch(
NAMESPACE, local_path.name, "push", body=json.dumps(body), method="POST"
)
# Then
mock_git.get_current_branch.assert_called_with(str(local_path))
mock_git.get_upstream_branch.assert_called_with(str(local_path), "foo")
mock_git.config.assert_not_called()
mock_git.remote_show.assert_not_called()
mock_git.push.assert_called_with(remote, "HEAD:foo", str(local_path), None, True)
assert response.code == 200
payload = json.loads(response.body)
assert payload == {"code": 0}
@patch("jupyterlab_git.handlers.GitPushHandler.git", spec=Git)
async def test_push_handler_noupstream_pass_remote_branch(
mock_git, jp_fetch, jp_root_dir
):
# Given
local_path = jp_root_dir / "test_path"
mock_git.get_current_branch.return_value = maybe_future("foo")
upstream = {"code": -1, "message": "oups"}
mock_git.get_upstream_branch.return_value = maybe_future(upstream)
mock_git.config.return_value = maybe_future({"options": dict()})
mock_git.remote_show.return_value = maybe_future({})
mock_git.push.return_value = maybe_future({"code": 0})
remote = "online"
remote_branch = "onfoo"
# When
body = {"remote": "/".join((remote, remote_branch))}
response = await jp_fetch(
NAMESPACE, local_path.name, "push", body=json.dumps(body), method="POST"
)
# Then
mock_git.get_current_branch.assert_called_with(str(local_path))
mock_git.get_upstream_branch.assert_called_with(str(local_path), "foo")
mock_git.config.assert_not_called()
mock_git.remote_show.assert_not_called()
mock_git.push.assert_called_with(
remote, "HEAD:" + remote_branch, str(local_path), None, True
)
assert response.code == 200
payload = json.loads(response.body)
assert payload == {"code": 0}
@patch("jupyterlab_git.handlers.GitUpstreamHandler.git", spec=Git)
async def test_upstream_handler_forward_slashes(mock_git, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
mock_git.get_current_branch.return_value = maybe_future("foo/bar")
upstream = {
"code": 0,
"remote_short_name": "origin/something",
"remote_branch": "foo/bar",
}
mock_git.get_upstream_branch.return_value = maybe_future(upstream)
# When
response = await jp_fetch(
NAMESPACE, local_path.name, "upstream", body="{}", method="POST"
)
# Then
mock_git.get_current_branch.assert_called_with(str(local_path))
mock_git.get_upstream_branch.assert_called_with(str(local_path), "foo/bar")
assert response.code == 200
payload = json.loads(response.body)
assert payload == upstream
@patch("jupyterlab_git.handlers.GitUpstreamHandler.git", spec=Git)
async def test_upstream_handler_localbranch(mock_git, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
mock_git.get_current_branch.return_value = maybe_future("foo/bar")
upstream = {"code": 0, "remote_short_name": ".", "remote_branch": "foo/bar"}
mock_git.get_upstream_branch.return_value = maybe_future(upstream)
# When
response = await jp_fetch(
NAMESPACE, local_path.name, "upstream", body="{}", method="POST"
)
# Then
mock_git.get_current_branch.assert_called_with(str(local_path))
mock_git.get_upstream_branch.assert_called_with(str(local_path), "foo/bar")
assert response.code == 200
payload = json.loads(response.body)
assert payload == upstream
@patch("jupyterlab_git.git.execute")
async def test_content(mock_execute, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
filename = "my/file"
content = "dummy content file\nwith multiple lines"
mock_execute.side_effect = [
maybe_future((0, "1\t1\t{}".format(filename), "")),
maybe_future((0, content, "")),
]
# When
body = {
"filename": filename,
"reference": {"git": "previous"},
}
response = await jp_fetch(
NAMESPACE, local_path.name, "content", body=json.dumps(body), method="POST"
)
# Then
assert response.code == 200
payload = json.loads(response.body)
assert payload["content"] == content
mock_execute.assert_has_calls(
[
call(
["git", "show", "{}:{}".format("previous", filename)],
cwd=str(local_path),
),
],
)
@patch("jupyterlab_git.git.execute")
async def test_content_working(mock_execute, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
filename = "my/file"
content = "dummy content file\nwith multiple lines"
mock_execute.side_effect = [
maybe_future((0, content, "")),
]
dummy_file = local_path / filename
dummy_file.parent.mkdir(parents=True)
dummy_file.write_text(content)
# When
body = {
"filename": filename,
"reference": {"special": "WORKING"},
}
response = await jp_fetch(
NAMESPACE, local_path.name, "content", body=json.dumps(body), method="POST"
)
# Then
assert response.code == 200
payload = json.loads(response.body)
assert payload["content"] == content
@patch("jupyterlab_git.git.execute")
async def test_content_index(mock_execute, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
filename = "my/file"
content = "dummy content file\nwith multiple lines"
mock_execute.side_effect = [
maybe_future((0, "1\t1\t{}".format(filename), "")),
maybe_future((0, content, "")),
]
# When
body = {
"filename": filename,
"reference": {"special": "INDEX"},
}
response = await jp_fetch(
NAMESPACE, local_path.name, "content", body=json.dumps(body), method="POST"
)
# Then
assert response.code == 200
payload = json.loads(response.body)
assert payload["content"] == content
mock_execute.assert_has_calls(
[
call(
["git", "show", "{}:{}".format("", filename)],
cwd=str(local_path),
),
],
)
@patch("jupyterlab_git.git.execute")
async def test_content_unknown_special(mock_execute, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
filename = "my/file"
content = "dummy content file\nwith multiple lines"
mock_execute.side_effect = [
maybe_future((0, "1\t1\t{}".format(filename), "")),
maybe_future((0, content, "")),
]
# When
body = {
"filename": filename,
"reference": {"special": "unknown"},
}
with pytest.raises(tornado.httpclient.HTTPClientError) as e:
await jp_fetch(
NAMESPACE, local_path.name, "content", body=json.dumps(body), method="POST"
)
assert_http_error(e, 500, expected_message="unknown special ref")
@patch("jupyterlab_git.git.execute")
async def test_content_show_handled_error(mock_execute, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
filename = "my/file"
mock_execute.return_value = maybe_future(
(
-1,
"",
"fatal: Path '{}' does not exist (neither on disk nor in the index)".format(
filename
),
)
)
# When
body = {
"filename": filename,
"reference": {"git": "current"},
}
response = await jp_fetch(
NAMESPACE, local_path.name, "content", body=json.dumps(body), method="POST"
)
# Then
assert response.code == 200
payload = json.loads(response.body)
assert payload["content"] == ""
@patch("jupyterlab_git.git.execute")
async def test_content_binary(mock_execute, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
filename = "my/file"
mock_execute.return_value = maybe_future((0, "-\t-\t{}".format(filename), ""))
# When
body = {
"filename": filename,
"reference": {"git": "current"},
}
# Then
with pytest.raises(tornado.httpclient.HTTPClientError) as e:
await jp_fetch(
NAMESPACE, local_path.name, "content", body=json.dumps(body), method="POST"
)
assert_http_error(e, 500, expected_message="file is not UTF-8")
@patch("jupyterlab_git.git.execute")
async def test_content_show_unhandled_error(mock_execute, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
filename = "my/file"
mock_execute.return_value = maybe_future((-1, "", "Dummy error"))
# When
body = {
"filename": filename,
"reference": {"git": "current"},
}
# Then
with pytest.raises(tornado.httpclient.HTTPClientError) as e:
await jp_fetch(
NAMESPACE, local_path.name, "content", body=json.dumps(body), method="POST"
)
assert_http_error(e, 500, expected_message="Dummy error")
@patch("jupyterlab_git.git.execute")
async def test_content_getcontent_deleted_file(mock_execute, jp_fetch, jp_root_dir):
# Given
local_path = jp_root_dir / "test_path"
filename = "my/absent_file"
content = "dummy content file\nwith multiple lines"
# When
body = {
"filename": filename,
"reference": {"special": "WORKING"},
}
# Then
response = await jp_fetch(
NAMESPACE, local_path.name, "content", body=json.dumps(body), method="POST"
)
# Then
assert response.code == 200
payload = json.loads(response.body)
assert payload["content"] == ""
| 31.2
| 88
| 0.648046
| 3,253
| 26,208
| 4.918537
| 0.063941
| 0.04725
| 0.030938
| 0.07425
| 0.88475
| 0.87975
| 0.87
| 0.853
| 0.8385
| 0.815188
| 0
| 0.010961
| 0.22371
| 26,208
| 839
| 89
| 31.237187
| 0.775473
| 0.015835
| 0
| 0.653659
| 0
| 0
| 0.160535
| 0.044217
| 0
| 0
| 0
| 0
| 0.172358
| 1
| 0.003252
| false
| 0.003252
| 0.011382
| 0
| 0.014634
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
72bbced62d4ddfecd524482ed49bec43dd604089
| 463
|
py
|
Python
|
descarteslabs/common/graft/client/__init__.py
|
descarteslabs/descarteslabs-python
|
efc874d6062603dc424c9646287a9b1f8636e7ac
|
[
"Apache-2.0"
] | 167
|
2017-03-23T22:16:58.000Z
|
2022-03-08T09:19:30.000Z
|
descarteslabs/common/graft/client/__init__.py
|
descarteslabs/descarteslabs-python
|
efc874d6062603dc424c9646287a9b1f8636e7ac
|
[
"Apache-2.0"
] | 93
|
2017-03-23T22:11:40.000Z
|
2021-12-13T18:38:53.000Z
|
descarteslabs/common/graft/client/__init__.py
|
descarteslabs/descarteslabs-python
|
efc874d6062603dc424c9646287a9b1f8636e7ac
|
[
"Apache-2.0"
] | 46
|
2017-03-25T19:12:14.000Z
|
2021-08-15T18:04:29.000Z
|
# Re-export the graft client helpers at package level so callers can write
# ``from ...graft.client import apply_graft`` instead of reaching into the
# ``client`` submodule.
from .client import (
    is_delayed,
    is_function_graft,
    value_graft,
    keyref_graft,
    apply_graft,
    function_graft,
    merge_value_grafts,
    guid,
    isolate_keys,
    parametrize,
    consistent_guid,
)

# Explicit public API: mirrors exactly the names imported above.
__all__ = [
    "is_delayed",
    "is_function_graft",
    "value_graft",
    "keyref_graft",
    "apply_graft",
    "function_graft",
    "merge_value_grafts",
    "guid",
    "isolate_keys",
    "parametrize",
    "consistent_guid",
]
| 16.535714
| 25
| 0.632829
| 48
| 463
| 5.5625
| 0.375
| 0.194757
| 0.082397
| 0.142322
| 0.928839
| 0.928839
| 0.928839
| 0.928839
| 0.928839
| 0.928839
| 0
| 0
| 0.259179
| 463
| 27
| 26
| 17.148148
| 0.778426
| 0
| 0
| 0
| 0
| 0
| 0.291577
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.038462
| 0
| 0.038462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f404addf23180d9c1ab987a4f28446e1147b8a90
| 825
|
py
|
Python
|
qqbot/core/exception/error.py
|
SuperKuroko/botpy
|
8e9a69ebe4d52a9a84b25047595925525495f402
|
[
"MIT"
] | 63
|
2021-12-27T05:55:07.000Z
|
2022-03-28T12:28:53.000Z
|
qqbot/core/exception/error.py
|
SuperKuroko/botpy
|
8e9a69ebe4d52a9a84b25047595925525495f402
|
[
"MIT"
] | 9
|
2022-01-06T03:33:30.000Z
|
2022-03-27T10:49:36.000Z
|
qqbot/core/exception/error.py
|
SuperKuroko/botpy
|
8e9a69ebe4d52a9a84b25047595925525495f402
|
[
"MIT"
] | 12
|
2021-12-31T07:46:12.000Z
|
2022-03-28T13:34:09.000Z
|
# -*- coding: utf-8 -*-
class WebsocketError:
    """Numeric websocket error codes used by the bot's connection layer.

    NOTE(review): 9001/9005 appear to mirror server-side close codes —
    confirm against the protocol documentation.
    """

    # Session is no longer valid and must be re-established.
    CodeInvalidSession = 9001
    # The websocket connection closed with an error.
    CodeConnCloseErr = 9005
class AuthenticationFailedError(RuntimeError):
    """Raised when authentication with the server fails."""

    def __init__(self, msg):
        # Also populate RuntimeError.args so repr() and pickling carry the
        # message; keep the legacy ``msgs`` attribute for existing callers.
        super().__init__(msg)
        self.msgs = msg

    def __str__(self):
        return self.msgs
class NotFoundError(RuntimeError):
    """Raised when a requested resource is not found."""

    def __init__(self, msg):
        # Also populate RuntimeError.args so repr() and pickling carry the
        # message; keep the legacy ``msgs`` attribute for existing callers.
        super().__init__(msg)
        self.msgs = msg

    def __str__(self):
        return self.msgs
class MethodNotAllowedError(RuntimeError):
    """Raised when the requested method is not allowed."""

    def __init__(self, msg):
        # Also populate RuntimeError.args so repr() and pickling carry the
        # message; keep the legacy ``msgs`` attribute for existing callers.
        super().__init__(msg)
        self.msgs = msg

    def __str__(self):
        return self.msgs
class SequenceNumberError(RuntimeError):
    """Raised on a sequence-number problem in the message stream."""

    def __init__(self, msg):
        # Also populate RuntimeError.args so repr() and pickling carry the
        # message; keep the legacy ``msgs`` attribute for existing callers.
        super().__init__(msg)
        self.msgs = msg

    def __str__(self):
        return self.msgs
class ServerError(RuntimeError):
    """Raised when the server reports an internal error."""

    def __init__(self, msg):
        # Also populate RuntimeError.args so repr() and pickling carry the
        # message; keep the legacy ``msgs`` attribute for existing callers.
        super().__init__(msg)
        self.msgs = msg

    def __str__(self):
        return self.msgs
| 17.553191
| 46
| 0.642424
| 89
| 825
| 5.505618
| 0.247191
| 0.163265
| 0.193878
| 0.234694
| 0.663265
| 0.663265
| 0.663265
| 0.663265
| 0.663265
| 0.663265
| 0
| 0.014827
| 0.264242
| 825
| 46
| 47
| 17.934783
| 0.792422
| 0.025455
| 0
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.357143
| false
| 0
| 0
| 0.178571
| 0.821429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
f45fb80b93113fb5ea369307d248bc9f549a43a2
| 1,107
|
py
|
Python
|
app/_test/suite/unit/test/tools/test_units.py
|
ewie/gbd-websuite
|
6f2814c7bb64d11cb5a0deec712df751718fb3e1
|
[
"Apache-2.0"
] | null | null | null |
app/_test/suite/unit/test/tools/test_units.py
|
ewie/gbd-websuite
|
6f2814c7bb64d11cb5a0deec712df751718fb3e1
|
[
"Apache-2.0"
] | null | null | null |
app/_test/suite/unit/test/tools/test_units.py
|
ewie/gbd-websuite
|
6f2814c7bb64d11cb5a0deec712df751718fb3e1
|
[
"Apache-2.0"
] | null | null | null |
import gws.tools.units
import _test.util as u
def test_parse():
    """Exercise unit parsing: conversion, defaults, and rejection of bad input."""
    parse = gws.tools.units.parse

    # Values are converted into one of the allowed units.
    assert parse('24.5mm', units=['px', 'mm']) == (24.5, 'mm')
    assert parse('24.5 m', units=['px', 'mm']) == (24500, 'mm')
    assert parse('1234 mm', units=['px', 'm']) == (1.234, 'm')
    assert parse('1234 cm', units=['px', 'm']) == (12.34, 'm')
    assert parse('1234 cm', units=['px', 'km']) == (0.01234, 'km')

    # Unit-less values fall back to the default unit when one is given.
    assert parse(1234, units=['px', 'm'], default='px') == (1234, 'px')
    assert parse('1234', units=['px', 'm'], default='px') == (1234, 'px')

    # Missing or unknown units without a default are rejected.
    for bad in ('1234', '1234 in', '1234 BLAH'):
        with u.raises(ValueError):
            parse(bad, units=['px', 'm'])
| 30.75
| 75
| 0.541102
| 175
| 1,107
| 3.411429
| 0.2
| 0.113903
| 0.239531
| 0.201005
| 0.824121
| 0.824121
| 0.711893
| 0.58124
| 0.58124
| 0.58124
| 0
| 0.077361
| 0.205962
| 1,107
| 35
| 76
| 31.628571
| 0.60182
| 0
| 0
| 0.217391
| 0
| 0
| 0.095754
| 0
| 0
| 0
| 0
| 0
| 0.304348
| 1
| 0.043478
| true
| 0
| 0.086957
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.