| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
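The rows that follow are easier to handle programmatically than visually. As a minimal sketch (assuming the records are exported as JSON Lines in a file named `rows.jsonl`, with field names exactly as in the schema above; the thresholds are illustrative, not part of the dataset), one way to pull out the Python sources that pass a few basic quality cuts:

```python
import json

def iter_clean_python(path="rows.jsonl", min_alphanum=0.6, max_line_len=1000):
    """Yield (repo, path, content) for rows passing simple quality cuts.

    The JSON Lines layout and the cut values are assumptions for this sketch;
    only the field names come from the schema above.
    """
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            row = json.loads(line)
            if row.get("lang") != "Python":
                continue
            if row.get("alphanum_fraction", 0.0) < min_alphanum:
                continue
            if row.get("max_line_length", 0) > max_line_len:
                continue
            # Skip files flagged as auto-generated by the quality signals.
            if row.get("qsc_code_cate_autogen_quality_signal"):
                continue
            yield row["max_stars_repo_name"], row["max_stars_repo_path"], row["content"]

if __name__ == "__main__":
    for repo, repo_path, content in iter_clean_python():
        print(repo, repo_path, len(content))
```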
Row 1

| field | value |
|---|---|
| hexsha | 3d03e7e9418a784fa6ae34ca818d4e877cfbf8bb |
| size | 6,545 |
| ext | py |
| lang | Python |
| max_stars_repo_path | loldib/getratings/models/NA/na_khazix/na_khazix_top.py |
| max_stars_repo_name | koliupy/loldib |
| max_stars_repo_head_hexsha | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | loldib/getratings/models/NA/na_khazix/na_khazix_top.py |
| max_issues_repo_name | koliupy/loldib |
| max_issues_repo_head_hexsha | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | loldib/getratings/models/NA/na_khazix/na_khazix_top.py |
| max_forks_repo_name | koliupy/loldib |
| max_forks_repo_head_hexsha | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:
from getratings.models.ratings import Ratings
class NA_Khazix_Top_Aatrox(Ratings):
pass
class NA_Khazix_Top_Ahri(Ratings):
pass
class NA_Khazix_Top_Akali(Ratings):
pass
class NA_Khazix_Top_Alistar(Ratings):
pass
class NA_Khazix_Top_Amumu(Ratings):
pass
class NA_Khazix_Top_Anivia(Ratings):
pass
class NA_Khazix_Top_Annie(Ratings):
pass
class NA_Khazix_Top_Ashe(Ratings):
pass
class NA_Khazix_Top_AurelionSol(Ratings):
pass
class NA_Khazix_Top_Azir(Ratings):
pass
class NA_Khazix_Top_Bard(Ratings):
pass
class NA_Khazix_Top_Blitzcrank(Ratings):
pass
class NA_Khazix_Top_Brand(Ratings):
pass
class NA_Khazix_Top_Braum(Ratings):
pass
class NA_Khazix_Top_Caitlyn(Ratings):
pass
class NA_Khazix_Top_Camille(Ratings):
pass
class NA_Khazix_Top_Cassiopeia(Ratings):
pass
class NA_Khazix_Top_Chogath(Ratings):
pass
class NA_Khazix_Top_Corki(Ratings):
pass
class NA_Khazix_Top_Darius(Ratings):
pass
class NA_Khazix_Top_Diana(Ratings):
pass
class NA_Khazix_Top_Draven(Ratings):
pass
class NA_Khazix_Top_DrMundo(Ratings):
pass
class NA_Khazix_Top_Ekko(Ratings):
pass
class NA_Khazix_Top_Elise(Ratings):
pass
class NA_Khazix_Top_Evelynn(Ratings):
pass
class NA_Khazix_Top_Ezreal(Ratings):
pass
class NA_Khazix_Top_Fiddlesticks(Ratings):
pass
class NA_Khazix_Top_Fiora(Ratings):
pass
class NA_Khazix_Top_Fizz(Ratings):
pass
class NA_Khazix_Top_Galio(Ratings):
pass
class NA_Khazix_Top_Gangplank(Ratings):
pass
class NA_Khazix_Top_Garen(Ratings):
pass
class NA_Khazix_Top_Gnar(Ratings):
pass
class NA_Khazix_Top_Gragas(Ratings):
pass
class NA_Khazix_Top_Graves(Ratings):
pass
class NA_Khazix_Top_Hecarim(Ratings):
pass
class NA_Khazix_Top_Heimerdinger(Ratings):
pass
class NA_Khazix_Top_Illaoi(Ratings):
pass
class NA_Khazix_Top_Irelia(Ratings):
pass
class NA_Khazix_Top_Ivern(Ratings):
pass
class NA_Khazix_Top_Janna(Ratings):
pass
class NA_Khazix_Top_JarvanIV(Ratings):
pass
class NA_Khazix_Top_Jax(Ratings):
pass
class NA_Khazix_Top_Jayce(Ratings):
pass
class NA_Khazix_Top_Jhin(Ratings):
pass
class NA_Khazix_Top_Jinx(Ratings):
pass
class NA_Khazix_Top_Kalista(Ratings):
pass
class NA_Khazix_Top_Karma(Ratings):
pass
class NA_Khazix_Top_Karthus(Ratings):
pass
class NA_Khazix_Top_Kassadin(Ratings):
pass
class NA_Khazix_Top_Katarina(Ratings):
pass
class NA_Khazix_Top_Kayle(Ratings):
pass
class NA_Khazix_Top_Kayn(Ratings):
pass
class NA_Khazix_Top_Kennen(Ratings):
pass
class NA_Khazix_Top_Khazix(Ratings):
pass
class NA_Khazix_Top_Kindred(Ratings):
pass
class NA_Khazix_Top_Kled(Ratings):
pass
class NA_Khazix_Top_KogMaw(Ratings):
pass
class NA_Khazix_Top_Leblanc(Ratings):
pass
class NA_Khazix_Top_LeeSin(Ratings):
pass
class NA_Khazix_Top_Leona(Ratings):
pass
class NA_Khazix_Top_Lissandra(Ratings):
pass
class NA_Khazix_Top_Lucian(Ratings):
pass
class NA_Khazix_Top_Lulu(Ratings):
pass
class NA_Khazix_Top_Lux(Ratings):
pass
class NA_Khazix_Top_Malphite(Ratings):
pass
class NA_Khazix_Top_Malzahar(Ratings):
pass
class NA_Khazix_Top_Maokai(Ratings):
pass
class NA_Khazix_Top_MasterYi(Ratings):
pass
class NA_Khazix_Top_MissFortune(Ratings):
pass
class NA_Khazix_Top_MonkeyKing(Ratings):
pass
class NA_Khazix_Top_Mordekaiser(Ratings):
pass
class NA_Khazix_Top_Morgana(Ratings):
pass
class NA_Khazix_Top_Nami(Ratings):
pass
class NA_Khazix_Top_Nasus(Ratings):
pass
class NA_Khazix_Top_Nautilus(Ratings):
pass
class NA_Khazix_Top_Nidalee(Ratings):
pass
class NA_Khazix_Top_Nocturne(Ratings):
pass
class NA_Khazix_Top_Nunu(Ratings):
pass
class NA_Khazix_Top_Olaf(Ratings):
pass
class NA_Khazix_Top_Orianna(Ratings):
pass
class NA_Khazix_Top_Ornn(Ratings):
pass
class NA_Khazix_Top_Pantheon(Ratings):
pass
class NA_Khazix_Top_Poppy(Ratings):
pass
class NA_Khazix_Top_Quinn(Ratings):
pass
class NA_Khazix_Top_Rakan(Ratings):
pass
class NA_Khazix_Top_Rammus(Ratings):
pass
class NA_Khazix_Top_RekSai(Ratings):
pass
class NA_Khazix_Top_Renekton(Ratings):
pass
class NA_Khazix_Top_Rengar(Ratings):
pass
class NA_Khazix_Top_Riven(Ratings):
pass
class NA_Khazix_Top_Rumble(Ratings):
pass
class NA_Khazix_Top_Ryze(Ratings):
pass
class NA_Khazix_Top_Sejuani(Ratings):
pass
class NA_Khazix_Top_Shaco(Ratings):
pass
class NA_Khazix_Top_Shen(Ratings):
pass
class NA_Khazix_Top_Shyvana(Ratings):
pass
class NA_Khazix_Top_Singed(Ratings):
pass
class NA_Khazix_Top_Sion(Ratings):
pass
class NA_Khazix_Top_Sivir(Ratings):
pass
class NA_Khazix_Top_Skarner(Ratings):
pass
class NA_Khazix_Top_Sona(Ratings):
pass
class NA_Khazix_Top_Soraka(Ratings):
pass
class NA_Khazix_Top_Swain(Ratings):
pass
class NA_Khazix_Top_Syndra(Ratings):
pass
class NA_Khazix_Top_TahmKench(Ratings):
pass
class NA_Khazix_Top_Taliyah(Ratings):
pass
class NA_Khazix_Top_Talon(Ratings):
pass
class NA_Khazix_Top_Taric(Ratings):
pass
class NA_Khazix_Top_Teemo(Ratings):
pass
class NA_Khazix_Top_Thresh(Ratings):
pass
class NA_Khazix_Top_Tristana(Ratings):
pass
class NA_Khazix_Top_Trundle(Ratings):
pass
class NA_Khazix_Top_Tryndamere(Ratings):
pass
class NA_Khazix_Top_TwistedFate(Ratings):
pass
class NA_Khazix_Top_Twitch(Ratings):
pass
class NA_Khazix_Top_Udyr(Ratings):
pass
class NA_Khazix_Top_Urgot(Ratings):
pass
class NA_Khazix_Top_Varus(Ratings):
pass
class NA_Khazix_Top_Vayne(Ratings):
pass
class NA_Khazix_Top_Veigar(Ratings):
pass
class NA_Khazix_Top_Velkoz(Ratings):
pass
class NA_Khazix_Top_Vi(Ratings):
pass
class NA_Khazix_Top_Viktor(Ratings):
pass
class NA_Khazix_Top_Vladimir(Ratings):
pass
class NA_Khazix_Top_Volibear(Ratings):
pass
class NA_Khazix_Top_Warwick(Ratings):
pass
class NA_Khazix_Top_Xayah(Ratings):
pass
class NA_Khazix_Top_Xerath(Ratings):
pass
class NA_Khazix_Top_XinZhao(Ratings):
pass
class NA_Khazix_Top_Yasuo(Ratings):
pass
class NA_Khazix_Top_Yorick(Ratings):
pass
class NA_Khazix_Top_Zac(Ratings):
pass
class NA_Khazix_Top_Zed(Ratings):
pass
class NA_Khazix_Top_Ziggs(Ratings):
pass
class NA_Khazix_Top_Zilean(Ratings):
pass
class NA_Khazix_Top_Zyra(Ratings):
pass

| field | value |
|---|---|
| avg_line_length | 15.695444 |
| max_line_length | 46 |
| alphanum_fraction | 0.766692 |

| metric | quality_signal value | raw value |
|---|---|---|
| qsc_code_num_words | 972 | 0 |
| qsc_code_num_chars | 6,545 | 0 |
| qsc_code_mean_word_length | 4.736626 | 0 |
| qsc_code_frac_words_unique | 0.151235 | null |
| qsc_code_frac_chars_top_2grams | 0.209818 | 1 |
| qsc_code_frac_chars_top_3grams | 0.389661 | 1 |
| qsc_code_frac_chars_top_4grams | 0.479583 | 1 |
| qsc_code_frac_chars_dupe_5grams | 0.803432 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.803432 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0 | 0 |
| qsc_code_frac_chars_dupe_8grams | 0 | 0 |
| qsc_code_frac_chars_dupe_9grams | 0 | 0 |
| qsc_code_frac_chars_dupe_10grams | 0 | 0 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0 | 0 |
| qsc_code_frac_chars_whitespace | 0.169748 | 0 |
| qsc_code_size_file_byte | 6,545 | 0 |
| qsc_code_num_lines | 416 | 0 |
| qsc_code_num_chars_line_max | 47 | 0 |
| qsc_code_num_chars_line_mean | 15.733173 | 0 |
| qsc_code_frac_chars_alphabet | 0.847258 | 0 |
| qsc_code_frac_chars_comments | 0 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.498195 | 0 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 | 0 |
| qsc_codepython_cate_var_zero | true | 1 |
| qsc_codepython_frac_lines_pass | 0.498195 | 1 |
| qsc_codepython_frac_lines_import | 0.00361 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.501805 | 0 |
| qsc_codepython_frac_lines_print | 0 | 0 |

| field | value |
|---|---|
| effective | 0 |
| hits | 7 |
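Several of the signals attached to this row describe the structure of the file rather than its text: qsc_codepython_frac_lines_pass_quality_signal and qsc_codepython_score_lines_no_logic_quality_signal are both roughly 0.5, which fits a module that is almost entirely empty class stubs. A minimal sketch of how line-based fractions of this kind could be computed (the dataset's exact definitions are not given here, so treat these as assumed approximations):

```python
def line_fractions(source: str) -> dict:
    """Compute simple per-line fractions over a Python source string.

    Illustrative approximations only, not the dataset's exact definitions.
    """
    lines = [ln.strip() for ln in source.splitlines() if ln.strip()]
    total = len(lines) or 1
    return {
        "frac_lines_pass": sum(ln == "pass" for ln in lines) / total,
        "frac_lines_import": sum(ln.startswith(("import ", "from ")) for ln in lines) / total,
        "frac_lines_print": sum(ln.startswith("print(") for ln in lines) / total,
    }

# Example: a stub-only module like the row above scores high on frac_lines_pass.
sample = "from getratings.models.ratings import Ratings\nclass A(Ratings):\n    pass\n"
print(line_fractions(sample))
```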
Row 2

| field | value |
|---|---|
| hexsha | 3d189022514ffa92e24cccd1441a05b0577b4e2e |
| size | 2,169 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/test_pyhive_runBCFTools_VC.py |
| max_stars_repo_name | elowy01/igsr_analysis |
| max_stars_repo_head_hexsha | ffea4885227c2299f886a4f41e70b6e1f6bb43da |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 3 |
| max_stars_repo_stars_event_min_datetime | 2018-04-20T15:04:34.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-30T06:36:02.000Z |
| max_issues_repo_path | tests/test_pyhive_runBCFTools_VC.py |
| max_issues_repo_name | elowy01/igsr_analysis |
| max_issues_repo_head_hexsha | ffea4885227c2299f886a4f41e70b6e1f6bb43da |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | 7 |
| max_issues_repo_issues_event_min_datetime | 2019-06-06T09:22:20.000Z |
| max_issues_repo_issues_event_max_datetime | 2021-11-23T17:41:52.000Z |
| max_forks_repo_path | tests/test_pyhive_runBCFTools_VC.py |
| max_forks_repo_name | elowy01/igsr_analysis |
| max_forks_repo_head_hexsha | ffea4885227c2299f886a4f41e70b6e1f6bb43da |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 5 |
| max_forks_repo_forks_event_min_datetime | 2017-11-02T11:17:35.000Z |
| max_forks_repo_forks_event_max_datetime | 2021-12-11T19:34:09.000Z |

content:
import os
import subprocess
import glob
import pytest
# test_pyhive_runBCFTools_VC.py
def test_runBCFTools_VC(bcftools_folder, hive_dir, datadir, clean_tmp):
"""
Test function to run BCFTools mpileup|call on a BAM file
"""
bam_file = "{0}/exampleBAM.bam".format(datadir)
reference = "{0}/exampleFASTA.fasta".format(datadir)
work_dir = "{0}/outdir/".format(datadir)
annots = "\"['DP','SP','AD']\""
command = "perl {0}/scripts/standaloneJob.pl PyHive.VariantCalling.BCFTools_caller -language python3 \
-outprefix {1} -work_dir {2} -chunk {3} -bam {4} -reference {5} \
-bcftools_folder {6} -annots {7} -verbose True".format(hive_dir, 'out', work_dir,
"\"['chr1','10000','30000']\"", bam_file,
reference, bcftools_folder, annots)
try:
subprocess.check_output(command, shell=True)
assert True
except subprocess.CalledProcessError as exc:
assert False
raise Exception(exc.output)
def test_runBCFTools_VC_woptions(bcftools_folder, hive_dir, datadir, clean_tmp):
"""
Test function to run BCFTools mpileup|call on a BAM file
using some options and arguments
"""
bam_file = "{0}/exampleBAM.bam".format(datadir)
reference = "{0}/exampleFASTA.fasta".format(datadir)
work_dir = "{0}/outdir/".format(datadir)
annots = "\"['DP','SP','AD']\""
command = "perl {0}/scripts/standaloneJob.pl PyHive.VariantCalling.BCFTools_caller -language python3 \
-outprefix {1} -work_dir {2} -chunk {3} -bam {4} -reference {5} \
-bcftools_folder {6} -annots {7} -E 1 -p 1 -m_pileup 3 -m_call 1 -v 1 " \
"-F 0.05 -C 25 -verbose True".format(hive_dir, 'out', work_dir,
"\"['chr1','10000','30000']\"", bam_file,
reference, bcftools_folder, annots)
try:
subprocess.check_output(command, shell=True)
assert True
except subprocess.CalledProcessError as exc:
assert False
raise Exception(exc.output)

| field | value |
|---|---|
| avg_line_length | 38.052632 |
| max_line_length | 106 |
| alphanum_fraction | 0.599355 |

| metric | quality_signal value | raw value |
|---|---|---|
| qsc_code_num_words | 256 | 0 |
| qsc_code_num_chars | 2,169 | 0 |
| qsc_code_mean_word_length | 4.9375 | 0 |
| qsc_code_frac_words_unique | 0.351563 | null |
| qsc_code_frac_chars_top_2grams | 0.066456 | 0 |
| qsc_code_frac_chars_top_3grams | 0.028481 | 0 |
| qsc_code_frac_chars_top_4grams | 0.031646 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.862342 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.862342 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.862342 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.862342 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.862342 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.862342 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.035421 | 0 |
| qsc_code_frac_chars_whitespace | 0.271093 | 0 |
| qsc_code_size_file_byte | 2,169 | 0 |
| qsc_code_num_lines | 56 | 0 |
| qsc_code_num_chars_line_max | 107 | 0 |
| qsc_code_num_chars_line_mean | 38.732143 | 0 |
| qsc_code_frac_chars_alphabet | 0.764073 | 0 |
| qsc_code_frac_chars_comments | 0.081604 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.756757 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0.081081 | 0 |
| qsc_code_frac_chars_string_length | 0.091467 | 0 |
| qsc_code_frac_chars_long_word_length | 0.022483 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0.108108 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.054054 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.108108 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.162162 | 0 |
| qsc_codepython_frac_lines_print | 0 | 0 |

| field | value |
|---|---|
| effective | 0 |
| hits | 7 |
Row 3

| field | value |
|---|---|
| hexsha | 3d479358107ba6396633f05381cdd46111709044 |
| size | 37,605 |
| ext | py |
| lang | Python |
| max_stars_repo_path | rbac/common/protobuf/task_transaction_pb2.py |
| max_stars_repo_name | knagware9/sawtooth-next-directory |
| max_stars_repo_head_hexsha | be80852e08d2b27e105d964c727509f2a974002d |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 1 |
| max_stars_repo_stars_event_min_datetime | 2019-04-14T20:16:59.000Z |
| max_stars_repo_stars_event_max_datetime | 2019-04-14T20:16:59.000Z |
| max_issues_repo_path | rbac/common/protobuf/task_transaction_pb2.py |
| max_issues_repo_name | crazyrex/sawtooth-next-directory |
| max_issues_repo_head_hexsha | 210b581c8c92c307fab2f6d2b9a55526b56b790a |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | rbac/common/protobuf/task_transaction_pb2.py |
| max_forks_repo_name | crazyrex/sawtooth-next-directory |
| max_forks_repo_head_hexsha | 210b581c8c92c307fab2f6d2b9a55526b56b790a |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 1 |
| max_forks_repo_forks_event_min_datetime | 2018-12-07T10:55:08.000Z |
| max_forks_repo_forks_event_max_datetime | 2018-12-07T10:55:08.000Z |

content:
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: task_transaction.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='task_transaction.proto',
package='',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x16task_transaction.proto\"n\n\x13ProposeAddTaskOwner\x12\x13\n\x0bproposal_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x0e\n\x06reason\x18\x04 \x01(\t\x12\x10\n\x08metadata\x18\x05 \x01(\t\"q\n\x16ProposeRemoveTaskOwner\x12\x13\n\x0bproposal_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x0e\n\x06reason\x18\x04 \x01(\t\x12\x10\n\x08metadata\x18\x05 \x01(\t\"n\n\x13ProposeAddTaskAdmin\x12\x13\n\x0bproposal_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x0e\n\x06reason\x18\x04 \x01(\t\x12\x10\n\x08metadata\x18\x05 \x01(\t\"q\n\x16ProposeRemoveTaskAdmin\x12\x13\n\x0bproposal_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x0e\n\x06reason\x18\x04 \x01(\t\x12\x10\n\x08metadata\x18\x05 \x01(\t\"\\\n\x13\x43onfirmAddTaskOwner\x12\x13\n\x0bproposal_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x0e\n\x06reason\x18\x04 \x01(\t\"_\n\x16\x43onfirmRemoveTaskOwner\x12\x13\n\x0bproposal_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x0e\n\x06reason\x18\x04 \x01(\t\"\\\n\x13\x43onfirmAddTaskAdmin\x12\x13\n\x0bproposal_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x0e\n\x06reason\x18\x04 \x01(\t\"_\n\x16\x43onfirmRemoveTaskAdmin\x12\x13\n\x0bproposal_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x0e\n\x06reason\x18\x04 \x01(\t\"[\n\x12RejectAddTaskOwner\x12\x13\n\x0bproposal_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x0e\n\x06reason\x18\x04 \x01(\t\"^\n\x15RejectRemoveTaskOwner\x12\x13\n\x0bproposal_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x0e\n\x06reason\x18\x04 \x01(\t\"[\n\x12RejectAddTaskAdmin\x12\x13\n\x0bproposal_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x0e\n\x06reason\x18\x04 \x01(\t\"^\n\x15RejectRemoveTaskAdmin\x12\x13\n\x0bproposal_id\x18\x01 \x01(\t\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x0e\n\x06reason\x18\x04 \x01(\t\"]\n\nCreateTask\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x64mins\x18\x03 \x03(\t\x12\x0e\n\x06owners\x18\x04 \x03(\t\x12\x10\n\x08metadata\x18\x05 \x01(\t\"b\n\nUpdateTask\x12\x0f\n\x07task_id\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\x12\x1b\n\x13old_metadata_sha512\x18\x03 \x01(\t\x12\x14\n\x0cnew_metadata\x18\x04 \x01(\tb\x06proto3')
)
_PROPOSEADDTASKOWNER = _descriptor.Descriptor(
name='ProposeAddTaskOwner',
full_name='ProposeAddTaskOwner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='proposal_id', full_name='ProposeAddTaskOwner.proposal_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='ProposeAddTaskOwner.task_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='ProposeAddTaskOwner.user_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reason', full_name='ProposeAddTaskOwner.reason', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='metadata', full_name='ProposeAddTaskOwner.metadata', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=26,
serialized_end=136,
)
_PROPOSEREMOVETASKOWNER = _descriptor.Descriptor(
name='ProposeRemoveTaskOwner',
full_name='ProposeRemoveTaskOwner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='proposal_id', full_name='ProposeRemoveTaskOwner.proposal_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='ProposeRemoveTaskOwner.task_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='ProposeRemoveTaskOwner.user_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reason', full_name='ProposeRemoveTaskOwner.reason', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='metadata', full_name='ProposeRemoveTaskOwner.metadata', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=138,
serialized_end=251,
)
_PROPOSEADDTASKADMIN = _descriptor.Descriptor(
name='ProposeAddTaskAdmin',
full_name='ProposeAddTaskAdmin',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='proposal_id', full_name='ProposeAddTaskAdmin.proposal_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='ProposeAddTaskAdmin.task_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='ProposeAddTaskAdmin.user_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reason', full_name='ProposeAddTaskAdmin.reason', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='metadata', full_name='ProposeAddTaskAdmin.metadata', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=253,
serialized_end=363,
)
_PROPOSEREMOVETASKADMIN = _descriptor.Descriptor(
name='ProposeRemoveTaskAdmin',
full_name='ProposeRemoveTaskAdmin',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='proposal_id', full_name='ProposeRemoveTaskAdmin.proposal_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='ProposeRemoveTaskAdmin.task_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='ProposeRemoveTaskAdmin.user_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reason', full_name='ProposeRemoveTaskAdmin.reason', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='metadata', full_name='ProposeRemoveTaskAdmin.metadata', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=365,
serialized_end=478,
)
_CONFIRMADDTASKOWNER = _descriptor.Descriptor(
name='ConfirmAddTaskOwner',
full_name='ConfirmAddTaskOwner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='proposal_id', full_name='ConfirmAddTaskOwner.proposal_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='ConfirmAddTaskOwner.task_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='ConfirmAddTaskOwner.user_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reason', full_name='ConfirmAddTaskOwner.reason', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=480,
serialized_end=572,
)
_CONFIRMREMOVETASKOWNER = _descriptor.Descriptor(
name='ConfirmRemoveTaskOwner',
full_name='ConfirmRemoveTaskOwner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='proposal_id', full_name='ConfirmRemoveTaskOwner.proposal_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='ConfirmRemoveTaskOwner.task_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='ConfirmRemoveTaskOwner.user_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reason', full_name='ConfirmRemoveTaskOwner.reason', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=574,
serialized_end=669,
)
_CONFIRMADDTASKADMIN = _descriptor.Descriptor(
name='ConfirmAddTaskAdmin',
full_name='ConfirmAddTaskAdmin',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='proposal_id', full_name='ConfirmAddTaskAdmin.proposal_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='ConfirmAddTaskAdmin.task_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='ConfirmAddTaskAdmin.user_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reason', full_name='ConfirmAddTaskAdmin.reason', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=671,
serialized_end=763,
)
_CONFIRMREMOVETASKADMIN = _descriptor.Descriptor(
name='ConfirmRemoveTaskAdmin',
full_name='ConfirmRemoveTaskAdmin',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='proposal_id', full_name='ConfirmRemoveTaskAdmin.proposal_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='ConfirmRemoveTaskAdmin.task_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='ConfirmRemoveTaskAdmin.user_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reason', full_name='ConfirmRemoveTaskAdmin.reason', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=765,
serialized_end=860,
)
_REJECTADDTASKOWNER = _descriptor.Descriptor(
name='RejectAddTaskOwner',
full_name='RejectAddTaskOwner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='proposal_id', full_name='RejectAddTaskOwner.proposal_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='RejectAddTaskOwner.task_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='RejectAddTaskOwner.user_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reason', full_name='RejectAddTaskOwner.reason', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=862,
serialized_end=953,
)
_REJECTREMOVETASKOWNER = _descriptor.Descriptor(
name='RejectRemoveTaskOwner',
full_name='RejectRemoveTaskOwner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='proposal_id', full_name='RejectRemoveTaskOwner.proposal_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='RejectRemoveTaskOwner.task_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='RejectRemoveTaskOwner.user_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reason', full_name='RejectRemoveTaskOwner.reason', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=955,
serialized_end=1049,
)
_REJECTADDTASKADMIN = _descriptor.Descriptor(
name='RejectAddTaskAdmin',
full_name='RejectAddTaskAdmin',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='proposal_id', full_name='RejectAddTaskAdmin.proposal_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='RejectAddTaskAdmin.task_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='RejectAddTaskAdmin.user_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reason', full_name='RejectAddTaskAdmin.reason', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1051,
serialized_end=1142,
)
_REJECTREMOVETASKADMIN = _descriptor.Descriptor(
name='RejectRemoveTaskAdmin',
full_name='RejectRemoveTaskAdmin',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='proposal_id', full_name='RejectRemoveTaskAdmin.proposal_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='RejectRemoveTaskAdmin.task_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='RejectRemoveTaskAdmin.user_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reason', full_name='RejectRemoveTaskAdmin.reason', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1144,
serialized_end=1238,
)
_CREATETASK = _descriptor.Descriptor(
name='CreateTask',
full_name='CreateTask',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='task_id', full_name='CreateTask.task_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='CreateTask.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='admins', full_name='CreateTask.admins', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='owners', full_name='CreateTask.owners', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='metadata', full_name='CreateTask.metadata', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1240,
serialized_end=1333,
)
_UPDATETASK = _descriptor.Descriptor(
name='UpdateTask',
full_name='UpdateTask',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='task_id', full_name='UpdateTask.task_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='new_name', full_name='UpdateTask.new_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='old_metadata_sha512', full_name='UpdateTask.old_metadata_sha512', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='new_metadata', full_name='UpdateTask.new_metadata', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1335,
serialized_end=1433,
)
DESCRIPTOR.message_types_by_name['ProposeAddTaskOwner'] = _PROPOSEADDTASKOWNER
DESCRIPTOR.message_types_by_name['ProposeRemoveTaskOwner'] = _PROPOSEREMOVETASKOWNER
DESCRIPTOR.message_types_by_name['ProposeAddTaskAdmin'] = _PROPOSEADDTASKADMIN
DESCRIPTOR.message_types_by_name['ProposeRemoveTaskAdmin'] = _PROPOSEREMOVETASKADMIN
DESCRIPTOR.message_types_by_name['ConfirmAddTaskOwner'] = _CONFIRMADDTASKOWNER
DESCRIPTOR.message_types_by_name['ConfirmRemoveTaskOwner'] = _CONFIRMREMOVETASKOWNER
DESCRIPTOR.message_types_by_name['ConfirmAddTaskAdmin'] = _CONFIRMADDTASKADMIN
DESCRIPTOR.message_types_by_name['ConfirmRemoveTaskAdmin'] = _CONFIRMREMOVETASKADMIN
DESCRIPTOR.message_types_by_name['RejectAddTaskOwner'] = _REJECTADDTASKOWNER
DESCRIPTOR.message_types_by_name['RejectRemoveTaskOwner'] = _REJECTREMOVETASKOWNER
DESCRIPTOR.message_types_by_name['RejectAddTaskAdmin'] = _REJECTADDTASKADMIN
DESCRIPTOR.message_types_by_name['RejectRemoveTaskAdmin'] = _REJECTREMOVETASKADMIN
DESCRIPTOR.message_types_by_name['CreateTask'] = _CREATETASK
DESCRIPTOR.message_types_by_name['UpdateTask'] = _UPDATETASK
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ProposeAddTaskOwner = _reflection.GeneratedProtocolMessageType('ProposeAddTaskOwner', (_message.Message,), dict(
DESCRIPTOR = _PROPOSEADDTASKOWNER,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:ProposeAddTaskOwner)
))
_sym_db.RegisterMessage(ProposeAddTaskOwner)
ProposeRemoveTaskOwner = _reflection.GeneratedProtocolMessageType('ProposeRemoveTaskOwner', (_message.Message,), dict(
DESCRIPTOR = _PROPOSEREMOVETASKOWNER,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:ProposeRemoveTaskOwner)
))
_sym_db.RegisterMessage(ProposeRemoveTaskOwner)
ProposeAddTaskAdmin = _reflection.GeneratedProtocolMessageType('ProposeAddTaskAdmin', (_message.Message,), dict(
DESCRIPTOR = _PROPOSEADDTASKADMIN,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:ProposeAddTaskAdmin)
))
_sym_db.RegisterMessage(ProposeAddTaskAdmin)
ProposeRemoveTaskAdmin = _reflection.GeneratedProtocolMessageType('ProposeRemoveTaskAdmin', (_message.Message,), dict(
DESCRIPTOR = _PROPOSEREMOVETASKADMIN,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:ProposeRemoveTaskAdmin)
))
_sym_db.RegisterMessage(ProposeRemoveTaskAdmin)
ConfirmAddTaskOwner = _reflection.GeneratedProtocolMessageType('ConfirmAddTaskOwner', (_message.Message,), dict(
DESCRIPTOR = _CONFIRMADDTASKOWNER,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:ConfirmAddTaskOwner)
))
_sym_db.RegisterMessage(ConfirmAddTaskOwner)
ConfirmRemoveTaskOwner = _reflection.GeneratedProtocolMessageType('ConfirmRemoveTaskOwner', (_message.Message,), dict(
DESCRIPTOR = _CONFIRMREMOVETASKOWNER,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:ConfirmRemoveTaskOwner)
))
_sym_db.RegisterMessage(ConfirmRemoveTaskOwner)
ConfirmAddTaskAdmin = _reflection.GeneratedProtocolMessageType('ConfirmAddTaskAdmin', (_message.Message,), dict(
DESCRIPTOR = _CONFIRMADDTASKADMIN,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:ConfirmAddTaskAdmin)
))
_sym_db.RegisterMessage(ConfirmAddTaskAdmin)
ConfirmRemoveTaskAdmin = _reflection.GeneratedProtocolMessageType('ConfirmRemoveTaskAdmin', (_message.Message,), dict(
DESCRIPTOR = _CONFIRMREMOVETASKADMIN,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:ConfirmRemoveTaskAdmin)
))
_sym_db.RegisterMessage(ConfirmRemoveTaskAdmin)
RejectAddTaskOwner = _reflection.GeneratedProtocolMessageType('RejectAddTaskOwner', (_message.Message,), dict(
DESCRIPTOR = _REJECTADDTASKOWNER,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:RejectAddTaskOwner)
))
_sym_db.RegisterMessage(RejectAddTaskOwner)
RejectRemoveTaskOwner = _reflection.GeneratedProtocolMessageType('RejectRemoveTaskOwner', (_message.Message,), dict(
DESCRIPTOR = _REJECTREMOVETASKOWNER,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:RejectRemoveTaskOwner)
))
_sym_db.RegisterMessage(RejectRemoveTaskOwner)
RejectAddTaskAdmin = _reflection.GeneratedProtocolMessageType('RejectAddTaskAdmin', (_message.Message,), dict(
DESCRIPTOR = _REJECTADDTASKADMIN,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:RejectAddTaskAdmin)
))
_sym_db.RegisterMessage(RejectAddTaskAdmin)
RejectRemoveTaskAdmin = _reflection.GeneratedProtocolMessageType('RejectRemoveTaskAdmin', (_message.Message,), dict(
DESCRIPTOR = _REJECTREMOVETASKADMIN,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:RejectRemoveTaskAdmin)
))
_sym_db.RegisterMessage(RejectRemoveTaskAdmin)
CreateTask = _reflection.GeneratedProtocolMessageType('CreateTask', (_message.Message,), dict(
DESCRIPTOR = _CREATETASK,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:CreateTask)
))
_sym_db.RegisterMessage(CreateTask)
UpdateTask = _reflection.GeneratedProtocolMessageType('UpdateTask', (_message.Message,), dict(
DESCRIPTOR = _UPDATETASK,
__module__ = 'task_transaction_pb2'
# @@protoc_insertion_point(class_scope:UpdateTask)
))
_sym_db.RegisterMessage(UpdateTask)
# @@protoc_insertion_point(module_scope)

| field | value |
|---|---|
| avg_line_length | 41.506623 |
| max_line_length | 2,776 |
| alphanum_fraction | 0.737216 |

| metric | quality_signal value | raw value |
|---|---|---|
| qsc_code_num_words | 4,686 | 0 |
| qsc_code_num_chars | 37,605 | 0 |
| qsc_code_mean_word_length | 5.627828 | 0 |
| qsc_code_frac_words_unique | 0.040973 | null |
| qsc_code_frac_chars_top_2grams | 0.05976 | 0 |
| qsc_code_frac_chars_top_3grams | 0.060519 | 0 |
| qsc_code_frac_chars_top_4grams | 0.027757 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.740937 | 0 |
| qsc_code_frac_chars_dupe_6grams | 0.724291 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.724291 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.724291 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.724291 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.694525 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.037179 | 0 |
| qsc_code_frac_chars_whitespace | 0.130967 | 0 |
| qsc_code_size_file_byte | 37,605 | 0 |
| qsc_code_num_lines | 905 | 0 |
| qsc_code_num_chars_line_max | 2,777 | 1 |
| qsc_code_num_chars_line_mean | 41.552486 | 0 |
| qsc_code_frac_chars_alphabet | 0.769798 | 0 |
| qsc_code_frac_chars_comments | 0.025741 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.741007 | 1 |
| qsc_code_cate_autogen | 1 | 1 |
| qsc_code_frac_lines_long_string | 0.001199 | 0 |
| qsc_code_frac_chars_string_length | 0.181823 | 0 |
| qsc_code_frac_chars_long_word_length | 0.130428 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.005995 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.005995 | 0 |
| qsc_codepython_frac_lines_print | 0 | 0 |

| field | value |
|---|---|
| effective | 0 |
| hits | 8 |
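This row is flagged with qsc_code_cate_autogen_quality_signal = 1, and the file announces itself as generated output ("Generated by the protocol buffer compiler. DO NOT EDIT!"). A minimal sketch of how such a flag could be derived from the content (the markers and the single-flag rule are assumptions, not the dataset's actual detector):

```python
import re

# Assumed banner markers; the dataset's real detector is not specified in this table.
AUTOGEN_MARKERS = (
    re.compile(r"Generated by the protocol buffer compiler", re.I),
    re.compile(r"do not edit", re.I),
    re.compile(r"@generated", re.I),
)

def looks_autogenerated(source: str, head_lines: int = 5) -> bool:
    """Return True if the first few lines contain a known 'generated code' banner."""
    head = "\n".join(source.splitlines()[:head_lines])
    return any(pat.search(head) for pat in AUTOGEN_MARKERS)

print(looks_autogenerated("# Generated by the protocol buffer compiler.  DO NOT EDIT!\nimport sys\n"))
```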
Row 4

| field | value |
|---|---|
| hexsha | 181ed57e3eb39153ad141aa8f03aeb15ee7f7127 |
| size | 510 |
| ext | py |
| lang | Python |
| max_stars_repo_path | idManager/view/authentication_view.py |
| max_stars_repo_name | lgarciasbr/idm-api |
| max_stars_repo_head_hexsha | 3517d29d55eb2a06fb5b4b21359b6cf6d11529a0 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 2 |
| max_stars_repo_stars_event_min_datetime | 2018-01-14T22:43:43.000Z |
| max_stars_repo_stars_event_max_datetime | 2018-01-14T22:43:48.000Z |
| max_issues_repo_path | idManager/view/authentication_view.py |
| max_issues_repo_name | lgarciasbr/idm-api |
| max_issues_repo_head_hexsha | 3517d29d55eb2a06fb5b4b21359b6cf6d11529a0 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | idManager/view/authentication_view.py |
| max_forks_repo_name | lgarciasbr/idm-api |
| max_forks_repo_head_hexsha | 3517d29d55eb2a06fb5b4b21359b6cf6d11529a0 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:
from flask import jsonify
def auth_login(http_status_code, message, token):
view = jsonify({'status_code': http_status_code, 'message': message, '_token': token})
return view
def auth_is_valid(http_status_code, message, token):
view = jsonify({'status_code': http_status_code, 'message': message, '_token': token})
return view
def auth_logout(http_status_code, message, token):
view = jsonify({'status_code': http_status_code, 'message': message, '_token': token})
return view

| field | value |
|---|---|
| avg_line_length | 25.5 |
| max_line_length | 90 |
| alphanum_fraction | 0.721569 |

| metric | quality_signal value | raw value |
|---|---|---|
| qsc_code_num_words | 68 | 0 |
| qsc_code_num_chars | 510 | 0 |
| qsc_code_mean_word_length | 5.088235 | 0 |
| qsc_code_frac_words_unique | 0.25 | null |
| qsc_code_frac_chars_top_2grams | 0.260116 | 1 |
| qsc_code_frac_chars_top_3grams | 0.242775 | 1 |
| qsc_code_frac_chars_top_4grams | 0.364162 | 1 |
| qsc_code_frac_chars_dupe_5grams | 0.864162 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.864162 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.864162 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.864162 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.864162 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.864162 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0 | 0 |
| qsc_code_frac_chars_whitespace | 0.154902 | 0 |
| qsc_code_size_file_byte | 510 | 0 |
| qsc_code_num_lines | 19 | 0 |
| qsc_code_num_chars_line_max | 91 | 0 |
| qsc_code_num_chars_line_mean | 26.842105 | 0 |
| qsc_code_frac_chars_alphabet | 0.802784 | 0 |
| qsc_code_frac_chars_comments | 0 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.6 | 0 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.141176 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.3 | 1 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.1 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.7 | 1 |
| qsc_codepython_frac_lines_print | 0 | 0 |

| field | value |
|---|---|
| effective | 0 |
| hits | 11 |
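The duplicate n-gram signals for this row (qsc_code_frac_chars_dupe_5grams_quality_signal through the 10-gram variant, all about 0.86) reflect that the three view functions are verbatim copies of each other. A rough sketch of a duplicated n-gram character fraction (the word-level tokenization and the exact accounting are assumptions, not the dataset's definition):

```python
from collections import Counter

def frac_chars_dupe_ngrams(source: str, n: int = 5) -> float:
    """Rough fraction of word characters that sit inside repeated word n-grams.

    Illustrative approximation only; the dataset's exact definition may differ.
    """
    words = source.split()
    ngrams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    if not ngrams:
        return 0.0
    counts = Counter(ngrams)
    dupe_chars = 0
    for gram, count in counts.items():
        if count > 1:
            # Count every occurrence of a gram that appears more than once.
            dupe_chars += count * sum(len(w) for w in gram)
    total_chars = sum(len(w) for w in words) or 1
    # Overlapping n-grams can double-count characters, so clamp to 1.0.
    return min(1.0, dupe_chars / total_chars)

# Three verbatim copies of the same snippet push the dupe fraction toward 1.0.
sample = "def f(x): return x " * 3
print(frac_chars_dupe_ngrams(sample))
```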
Row 5

| field | value |
|---|---|
| hexsha | 184e8888d3aeff144a6fa7390d4e574c4fcd9c17 |
| size | 18,542 |
| ext | py |
| lang | Python |
| max_stars_repo_path | pytests/tuqquery/tuq_tokens.py |
| max_stars_repo_name | ramalingam-cb/testrunner |
| max_stars_repo_head_hexsha | 81cea7a5a493cf0c67fca7f97c667cd3c6ad2142 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | pytests/tuqquery/tuq_tokens.py |
| max_issues_repo_name | ramalingam-cb/testrunner |
| max_issues_repo_head_hexsha | 81cea7a5a493cf0c67fca7f97c667cd3c6ad2142 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | pytests/tuqquery/tuq_tokens.py |
| max_forks_repo_name | ramalingam-cb/testrunner |
| max_forks_repo_head_hexsha | 81cea7a5a493cf0c67fca7f97c667cd3c6ad2142 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:
from lib.remote.remote_util import RemoteMachineShellConnection
from pytests.tuqquery.tuq import QueryTests
class TokenTests(QueryTests):
def setUp(self):
if not self._testMethodName == 'suite_setUp':
self.skip_buckets_handle = True
super(TokenTests, self).setUp()
self.n1ql_port = self.input.param("n1ql_port", 8093)
self.scan_consistency = self.input.param("scan_consistency", 'REQUEST_PLUS')
def tearDown(self):
server = self.master
shell = RemoteMachineShellConnection(server)
# shell.execute_command("""curl -X DELETE -u Administrator:password http://{0}:8091/pools/default/buckets/beer-sample""".format(server.ip))
self.sleep(20)
super(TokenTests, self).tearDown()
def test_tokens_secondary_indexes(self):
self.rest.load_sample("beer-sample")
self.sleep(20)
created_indexes = []
self.query = 'create primary index on `beer-sample`'
self.run_cbq_query()
self.query = 'create index idx1 on `beer-sample`(description,name )'
self.run_cbq_query()
self.query = 'create index idx2 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx3 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"lower","names":true,"specials":false}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx4 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"upper","names":false,"specials":true}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx5 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"upper","names":false}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx6 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"upper"}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx7 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx8 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"":""}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx9 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"random"}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx10 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"names":"random"}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx11 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"specials":"random"}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx12 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description) END )'
self.run_cbq_query()
self.query = 'create index idx13 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"lower"}) END )'
self.run_cbq_query()
self.query = 'create index idx14 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"upper"}) END )'
self.run_cbq_query()
self.query = 'create index idx15 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"lower","names":true,"specials":false}) END )'
self.run_cbq_query()
self.query = 'create index idx16 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"upper","names":false,"specials":true}) END )'
self.run_cbq_query()
self.query = 'create index idx17 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"upper","names":false}) END )'
self.run_cbq_query()
self.query = 'create index idx18 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{}) END )'
self.run_cbq_query()
self.query = 'create index idx19 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"":""}) END )'
self.run_cbq_query()
self.query = 'create index idx20 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"random"}) END )'
self.run_cbq_query()
self.query = 'create index idx21 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"names":"random"}) END )'
self.run_cbq_query()
self.query = 'create index idx22 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"specials":"random"}) END )'
self.run_cbq_query()
for i in xrange(1,22):
index = 'idx{0}'.format(i)
created_indexes.append(index)
self.query = 'explain select name from `beer-sample` where any v in tokens(description) satisfies v = "golden" END limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue(actual_result['results'])
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx2")
self.assertTrue(str(plan['~children'][0]['~children'][0]['scan']['covers'][0]) == ('cover ((distinct (array `v` for `v` in tokens((`beer-sample`.`description`)) end)))'))
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(reverse(description)) satisfies v = "nedlog" END order by meta().id limit 10'
expected_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` where any v in tokens(reverse(description)) satisfies v = "nedlog" END order by meta().id limit 10'
actual_result = self.run_cbq_query()
#self.assertTrue(str(actual_result['results'])=="[{u'name': u'21A IPA'}, {u'name': u'Amendment Pale Ale'}, {u'name': u'Double Trouble IPA'}, {u'name': u'South Park Blonde'}, {u'name': u'Restoration Pale Ale'}, {u'name': u'S.O.S'}, {u'name': u'Satsuma Harvest Wit'}, {u'name': u'Adnams Explorer'}, {u'name': u'Shock Top'}, {u'name': u'Anniversary Maibock'}]" )
self.assertTrue((actual_result['results'])== (expected_result['results']))
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{"case":"lower","names":true,"specials":false}) satisfies v = "brewery" END limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(str(plan['~children'][0]['~children'][0]['scan']['covers'][0]) == ('cover ((distinct (array `v` for `v` in tokens((`beer-sample`.`description`), {"case": "lower", "names": true, "specials": false}) end)))'))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx3")
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{"case":"lower","names":true,"specials":false}) satisfies v = "brewery" END order by meta().id limit 10'
expected_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`idx15`) where any v in tokens(description,{"case":"lower","names":true,"specials":false}) satisfies v = "brewery" END order by meta().id limit 10'
actual_result = self.run_cbq_query()
self.assertTrue((actual_result['results'])== (expected_result['results']) )
self.query = 'explain select name from `beer-sample` use index(`idx14`) where any v in tokens(description,{"case":"upper","names":false,"specials":true}) satisfies v = "BREWERY" END order by meta().id limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(str(plan['~children'][0]['~children'][0]['scan']['covers'][0]) == ('cover ((distinct (array `v` for `v` in tokens((`beer-sample`.`description`), {"case": "upper", "names": false, "specials": true}) end)))'))
self.assertTrue(str(plan['~children'][0]['~children'][0]['scan']['index']) == "idx4")
self.query = 'select name from `beer-sample` use index(`idx16`) where any v in tokens(description,{"case":"upper","names":false,"specials":true}) satisfies v = "BREWERY" END order by meta().id limit 10'
actual_result = self.run_cbq_query()
self.assertTrue((actual_result['results'])== (expected_result['results']))
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{"case":"upper","names":false}) satisfies v = "GOLDEN" END limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx5")
self.query = 'select name from `beer-sample` use index(`idx17`) where any v in tokens(description,{"case":"upper","names":false}) satisfies v = "GOLDEN" END limit 10'
actual_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{"case":"upper","names":false}) satisfies v = "GOLDEN" END limit 10'
expected_result = self.run_cbq_query()
self.assertTrue(actual_result['results']==expected_result['results'])
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{}) satisfies v = "golden" END limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx7")
self.query = 'select name from `beer-sample` use index(`idx18`) where any v in tokens(description,{}) satisfies v = "golden" END limit 10'
actual_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{}) satisfies v = "golden" END limit 10'
expected_result = self.run_cbq_query()
self.assertTrue(actual_result['results']==expected_result['results'])
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{"":""}) satisfies v = "golden" END limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx8")
self.query = 'select name from `beer-sample` use index(`idx19`) where any v in tokens(description,{"":""}) satisfies v = "golden" END order by name '
actual_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{"":""}) satisfies v = "golden" END order by name '
expected_result = self.run_cbq_query()
self.assertTrue(actual_result['results']==expected_result['results'])
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{"case":"random"}) satisfies v = "golden" END '
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['scan']['index'] == "idx9")
self.query = 'select name from `beer-sample` use index(`idx20`) where any v in tokens(description,{"case":"random"}) satisfies v = "golden" END order by name '
actual_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{"case":"random"}) satisfies v = "golden" END order by name '
expected_result = self.run_cbq_query()
self.assertTrue(actual_result['results']==expected_result['results'])
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{"specials":"random"}) satisfies v = "brewery" END order by name'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx11")
self.query = 'select name from `beer-sample` use index(`idx22`) where any v in tokens(description,{"specials":"random"}) satisfies v = "golden" END order by name'
actual_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{"specials":"random"}) satisfies v = "golden" END order by name'
expected_result = self.run_cbq_query()
self.assertTrue(actual_result['results']==expected_result['results'])
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{"names":"random"}) satisfies v = "brewery" END limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx10")
self.query = 'select name from `beer-sample` use index(`idx21`) where any v in tokens(description,{"names":"random"}) satisfies v = "golden" END limit 10'
actual_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{"names":"random"}) satisfies v = "golden" END limit 10'
expected_result = self.run_cbq_query()
self.assertTrue(actual_result['results']==expected_result['results'])
for idx in created_indexes:
self.query = "DROP INDEX %s.%s USING %s" % ("`beer-sample`", idx, self.index_type)
actual_result = self.run_cbq_query()
'''This test is specific to beer-sample bucket'''
def test_tokens_simple_syntax(self):
self.rest.load_sample("beer-sample")
bucket_doc_map = {"beer-sample": 7303}
bucket_status_map = {"beer-sample": "healthy"}
self.wait_for_buckets_status(bucket_status_map, 5, 120)
self.wait_for_bucket_docs(bucket_doc_map, 5, 120)
self._wait_for_index_online("beer-sample", "beer_primary")
self.sleep(10)
created_indexes = []
try:
idx1 = "idx_suffixes"
idx2 = "idx_tokens"
idx3 = "idx_pairs"
idx4 = "idx_addresses"
self.query = 'CREATE INDEX {0} ON `beer-sample`( DISTINCT SUFFIXES( name ) )'.format(idx1)
self.run_cbq_query()
self._wait_for_index_online("beer-sample", "beer_primary")
created_indexes.append(idx1)
self.query = "explain select * from `beer-sample` where name like '%Cafe%'"
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertEqual(plan['~children'][0]['scan']['index'], idx1)
self.query = 'CREATE INDEX {0} ON `beer-sample`( DISTINCT TOKENS( description ) )'.format(idx2)
self.run_cbq_query()
self._wait_for_index_online("beer-sample", "beer_primary")
created_indexes.append(idx2)
self.query = "explain select * from `beer-sample` where contains_token(description,'Great')"
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertEqual(plan['~children'][0]['scan']['index'], idx2)
self.query = "CREATE INDEX {0} ON `beer-sample`( DISTINCT PAIRS( SELF ) )".format(idx3)
self.run_cbq_query()
self._wait_for_index_online("beer-sample", "beer_primary")
created_indexes.append(idx3)
self.query = "explain select * from `beer-sample` where name like 'A%' and abv > 6"
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("idx_suffixes" in str(plan))
self.query = "CREATE INDEX {0} ON `beer-sample`( ALL address )".format(idx4)
self.run_cbq_query()
self._wait_for_index_online("beer-sample", "beer_primary")
created_indexes.append(idx4)
self.query = "explain select min(addr) from `beer-sample` unnest address as addr"
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertEqual(plan['~children'][0]['index'], idx4)
self.query = "explain select count(a) from `beer-sample` unnest address as a"
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertEqual(plan['~children'][0]['index'], idx4)
self.query = "explain select * from `beer-sample` where any place in address satisfies " \
"place LIKE '100 %' end"
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue(idx4 in str(plan))
self.assertTrue(idx3 in str(plan))
finally:
for idx in created_indexes:
self.query = "DROP INDEX `beer-sample`.%s" % (idx)
self.run_cbq_query()
self.rest.delete_bucket("beer-sample")
def test_dynamicindex_limit(self):
self.rest.load_sample("beer-sample")
self.sleep(20)
created_indexes = []
try:
idx1 = "idx_abv"
idx2 = "dynamic"
self.query = "CREATE INDEX idx_abv ON `beer-sample`( abv )"
self.run_cbq_query()
created_indexes.append(idx1)
self.query = "CREATE INDEX dynamic ON `beer-sample`( DISTINCT PAIRS( SELF ) )"
self.run_cbq_query()
created_indexes.append(idx2)
self.query = "Explain select * from `beer-sample` where abv > 5 LIMIT 10"
res = self.run_cbq_query()
plan = self.ExplainPlanHelper(res)
self.assertTrue(plan['~children'][0]['~children'][0]['limit']=='10')
finally:
for idx in created_indexes:
self.query = "DROP INDEX `beer-sample`.%s" % ( idx)
self.run_cbq_query()
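# --- Hedged sketch (added for illustration; not part of the original test file) ---
# The tests above all exercise one pattern: build a DISTINCT ARRAY index over
# tokens(<field>[, options]) and query with ANY v IN tokens(...) SATISFIES ... END
# using the same tokens() arguments, so the planner can pick the array index and
# cover the predicate. A minimal standalone pair of statements, assuming the
# `beer-sample` bucket used throughout (the index name idx_tokens_demo is hypothetical):
TOKENS_INDEX_SKETCH = (
    'CREATE INDEX idx_tokens_demo ON `beer-sample`'
    '(DISTINCT ARRAY v FOR v IN tokens(description, {"case":"lower"}) END, name)'
)
TOKENS_QUERY_SKETCH = (
    'SELECT name FROM `beer-sample` '
    'WHERE ANY v IN tokens(description, {"case":"lower"}) SATISFIES v = "golden" END'
)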
| 65.059649
| 367
| 0.642595
| 2,416
| 18,542
| 4.814983
| 0.086507
| 0.06963
| 0.055876
| 0.083813
| 0.85842
| 0.844752
| 0.827989
| 0.81329
| 0.805811
| 0.741425
| 0
| 0.013943
| 0.210927
| 18,542
| 284
| 368
| 65.288732
| 0.78115
| 0.026696
| 0
| 0.516393
| 0
| 0.204918
| 0.475325
| 0.111648
| 0
| 0
| 0
| 0
| 0.159836
| 1
| 0.020492
| false
| 0
| 0.008197
| 0
| 0.032787
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a1003f2195e718d7338e4e93046ad32eab667f13
| 6,545
|
py
|
Python
|
loldib/getratings/models/NA/na_rengar/na_rengar_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_rengar/na_rengar_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_rengar/na_rengar_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Rengar_Mid_Aatrox(Ratings):
pass
class NA_Rengar_Mid_Ahri(Ratings):
pass
class NA_Rengar_Mid_Akali(Ratings):
pass
class NA_Rengar_Mid_Alistar(Ratings):
pass
class NA_Rengar_Mid_Amumu(Ratings):
pass
class NA_Rengar_Mid_Anivia(Ratings):
pass
class NA_Rengar_Mid_Annie(Ratings):
pass
class NA_Rengar_Mid_Ashe(Ratings):
pass
class NA_Rengar_Mid_AurelionSol(Ratings):
pass
class NA_Rengar_Mid_Azir(Ratings):
pass
class NA_Rengar_Mid_Bard(Ratings):
pass
class NA_Rengar_Mid_Blitzcrank(Ratings):
pass
class NA_Rengar_Mid_Brand(Ratings):
pass
class NA_Rengar_Mid_Braum(Ratings):
pass
class NA_Rengar_Mid_Caitlyn(Ratings):
pass
class NA_Rengar_Mid_Camille(Ratings):
pass
class NA_Rengar_Mid_Cassiopeia(Ratings):
pass
class NA_Rengar_Mid_Chogath(Ratings):
pass
class NA_Rengar_Mid_Corki(Ratings):
pass
class NA_Rengar_Mid_Darius(Ratings):
pass
class NA_Rengar_Mid_Diana(Ratings):
pass
class NA_Rengar_Mid_Draven(Ratings):
pass
class NA_Rengar_Mid_DrMundo(Ratings):
pass
class NA_Rengar_Mid_Ekko(Ratings):
pass
class NA_Rengar_Mid_Elise(Ratings):
pass
class NA_Rengar_Mid_Evelynn(Ratings):
pass
class NA_Rengar_Mid_Ezreal(Ratings):
pass
class NA_Rengar_Mid_Fiddlesticks(Ratings):
pass
class NA_Rengar_Mid_Fiora(Ratings):
pass
class NA_Rengar_Mid_Fizz(Ratings):
pass
class NA_Rengar_Mid_Galio(Ratings):
pass
class NA_Rengar_Mid_Gangplank(Ratings):
pass
class NA_Rengar_Mid_Garen(Ratings):
pass
class NA_Rengar_Mid_Gnar(Ratings):
pass
class NA_Rengar_Mid_Gragas(Ratings):
pass
class NA_Rengar_Mid_Graves(Ratings):
pass
class NA_Rengar_Mid_Hecarim(Ratings):
pass
class NA_Rengar_Mid_Heimerdinger(Ratings):
pass
class NA_Rengar_Mid_Illaoi(Ratings):
pass
class NA_Rengar_Mid_Irelia(Ratings):
pass
class NA_Rengar_Mid_Ivern(Ratings):
pass
class NA_Rengar_Mid_Janna(Ratings):
pass
class NA_Rengar_Mid_JarvanIV(Ratings):
pass
class NA_Rengar_Mid_Jax(Ratings):
pass
class NA_Rengar_Mid_Jayce(Ratings):
pass
class NA_Rengar_Mid_Jhin(Ratings):
pass
class NA_Rengar_Mid_Jinx(Ratings):
pass
class NA_Rengar_Mid_Kalista(Ratings):
pass
class NA_Rengar_Mid_Karma(Ratings):
pass
class NA_Rengar_Mid_Karthus(Ratings):
pass
class NA_Rengar_Mid_Kassadin(Ratings):
pass
class NA_Rengar_Mid_Katarina(Ratings):
pass
class NA_Rengar_Mid_Kayle(Ratings):
pass
class NA_Rengar_Mid_Kayn(Ratings):
pass
class NA_Rengar_Mid_Kennen(Ratings):
pass
class NA_Rengar_Mid_Khazix(Ratings):
pass
class NA_Rengar_Mid_Kindred(Ratings):
pass
class NA_Rengar_Mid_Kled(Ratings):
pass
class NA_Rengar_Mid_KogMaw(Ratings):
pass
class NA_Rengar_Mid_Leblanc(Ratings):
pass
class NA_Rengar_Mid_LeeSin(Ratings):
pass
class NA_Rengar_Mid_Leona(Ratings):
pass
class NA_Rengar_Mid_Lissandra(Ratings):
pass
class NA_Rengar_Mid_Lucian(Ratings):
pass
class NA_Rengar_Mid_Lulu(Ratings):
pass
class NA_Rengar_Mid_Lux(Ratings):
pass
class NA_Rengar_Mid_Malphite(Ratings):
pass
class NA_Rengar_Mid_Malzahar(Ratings):
pass
class NA_Rengar_Mid_Maokai(Ratings):
pass
class NA_Rengar_Mid_MasterYi(Ratings):
pass
class NA_Rengar_Mid_MissFortune(Ratings):
pass
class NA_Rengar_Mid_MonkeyKing(Ratings):
pass
class NA_Rengar_Mid_Mordekaiser(Ratings):
pass
class NA_Rengar_Mid_Morgana(Ratings):
pass
class NA_Rengar_Mid_Nami(Ratings):
pass
class NA_Rengar_Mid_Nasus(Ratings):
pass
class NA_Rengar_Mid_Nautilus(Ratings):
pass
class NA_Rengar_Mid_Nidalee(Ratings):
pass
class NA_Rengar_Mid_Nocturne(Ratings):
pass
class NA_Rengar_Mid_Nunu(Ratings):
pass
class NA_Rengar_Mid_Olaf(Ratings):
pass
class NA_Rengar_Mid_Orianna(Ratings):
pass
class NA_Rengar_Mid_Ornn(Ratings):
pass
class NA_Rengar_Mid_Pantheon(Ratings):
pass
class NA_Rengar_Mid_Poppy(Ratings):
pass
class NA_Rengar_Mid_Quinn(Ratings):
pass
class NA_Rengar_Mid_Rakan(Ratings):
pass
class NA_Rengar_Mid_Rammus(Ratings):
pass
class NA_Rengar_Mid_RekSai(Ratings):
pass
class NA_Rengar_Mid_Renekton(Ratings):
pass
class NA_Rengar_Mid_Rengar(Ratings):
pass
class NA_Rengar_Mid_Riven(Ratings):
pass
class NA_Rengar_Mid_Rumble(Ratings):
pass
class NA_Rengar_Mid_Ryze(Ratings):
pass
class NA_Rengar_Mid_Sejuani(Ratings):
pass
class NA_Rengar_Mid_Shaco(Ratings):
pass
class NA_Rengar_Mid_Shen(Ratings):
pass
class NA_Rengar_Mid_Shyvana(Ratings):
pass
class NA_Rengar_Mid_Singed(Ratings):
pass
class NA_Rengar_Mid_Sion(Ratings):
pass
class NA_Rengar_Mid_Sivir(Ratings):
pass
class NA_Rengar_Mid_Skarner(Ratings):
pass
class NA_Rengar_Mid_Sona(Ratings):
pass
class NA_Rengar_Mid_Soraka(Ratings):
pass
class NA_Rengar_Mid_Swain(Ratings):
pass
class NA_Rengar_Mid_Syndra(Ratings):
pass
class NA_Rengar_Mid_TahmKench(Ratings):
pass
class NA_Rengar_Mid_Taliyah(Ratings):
pass
class NA_Rengar_Mid_Talon(Ratings):
pass
class NA_Rengar_Mid_Taric(Ratings):
pass
class NA_Rengar_Mid_Teemo(Ratings):
pass
class NA_Rengar_Mid_Thresh(Ratings):
pass
class NA_Rengar_Mid_Tristana(Ratings):
pass
class NA_Rengar_Mid_Trundle(Ratings):
pass
class NA_Rengar_Mid_Tryndamere(Ratings):
pass
class NA_Rengar_Mid_TwistedFate(Ratings):
pass
class NA_Rengar_Mid_Twitch(Ratings):
pass
class NA_Rengar_Mid_Udyr(Ratings):
pass
class NA_Rengar_Mid_Urgot(Ratings):
pass
class NA_Rengar_Mid_Varus(Ratings):
pass
class NA_Rengar_Mid_Vayne(Ratings):
pass
class NA_Rengar_Mid_Veigar(Ratings):
pass
class NA_Rengar_Mid_Velkoz(Ratings):
pass
class NA_Rengar_Mid_Vi(Ratings):
pass
class NA_Rengar_Mid_Viktor(Ratings):
pass
class NA_Rengar_Mid_Vladimir(Ratings):
pass
class NA_Rengar_Mid_Volibear(Ratings):
pass
class NA_Rengar_Mid_Warwick(Ratings):
pass
class NA_Rengar_Mid_Xayah(Ratings):
pass
class NA_Rengar_Mid_Xerath(Ratings):
pass
class NA_Rengar_Mid_XinZhao(Ratings):
pass
class NA_Rengar_Mid_Yasuo(Ratings):
pass
class NA_Rengar_Mid_Yorick(Ratings):
pass
class NA_Rengar_Mid_Zac(Ratings):
pass
class NA_Rengar_Mid_Zed(Ratings):
pass
class NA_Rengar_Mid_Ziggs(Ratings):
pass
class NA_Rengar_Mid_Zilean(Ratings):
pass
class NA_Rengar_Mid_Zyra(Ratings):
pass
| 15.695444
| 46
| 0.766692
| 972
| 6,545
| 4.736626
| 0.151235
| 0.209818
| 0.389661
| 0.479583
| 0.803432
| 0.803432
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169748
| 6,545
| 416
| 47
| 15.733173
| 0.847258
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
a1cc680c5d6f410a35524d1c6900493495131044
| 181
|
py
|
Python
|
hw4/4.3.py
|
ArtemNikolaev/gb-hw
|
b82403e39dc1ca530dc438309fc98ba89ce4337b
|
[
"Unlicense"
] | null | null | null |
hw4/4.3.py
|
ArtemNikolaev/gb-hw
|
b82403e39dc1ca530dc438309fc98ba89ce4337b
|
[
"Unlicense"
] | 40
|
2021-12-30T15:57:10.000Z
|
2022-01-26T16:44:24.000Z
|
hw4/4.3.py
|
ArtemNikolaev/gb-hw
|
b82403e39dc1ca530dc438309fc98ba89ce4337b
|
[
"Unlicense"
] | 1
|
2022-03-12T19:17:26.000Z
|
2022-03-12T19:17:26.000Z
|
# https://github.com/ArtemNikolaev/gb-hw/issues/24
def multiple_of_20_21():
return (i for i in range(20, 241) if i % 20 == 0 or i % 21 == 0)
print(list(multiple_of_20_21()))
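# Hedged check (added for clarity; not in the original snippet): the generator above
# yields every i in [20, 240] divisible by 20 or 21, so the printed list starts
# 20, 21, 40, 42, 60, 63, ... and ends at 240.
assert list(multiple_of_20_21())[:6] == [20, 21, 40, 42, 60, 63]
assert list(multiple_of_20_21())[-1] == 240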
| 22.625
| 68
| 0.662983
| 35
| 181
| 3.257143
| 0.685714
| 0.175439
| 0.210526
| 0.245614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14
| 0.171271
| 181
| 7
| 69
| 25.857143
| 0.62
| 0.265193
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0.333333
| 0.666667
| 0.333333
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
a1cdf3d6b6757ac8b742a5871545ebfcd99aef04
| 13,761
|
py
|
Python
|
hopper_controller/src/hexapod/folding_manager.py
|
CreedyNZ/Hopper_ROS
|
1e6354109f034a7d1d41a5b39ddcb632cfee64b2
|
[
"MIT"
] | 36
|
2018-12-19T18:03:08.000Z
|
2022-02-21T16:20:12.000Z
|
hopper_controller/src/hexapod/folding_manager.py
|
CreedyNZ/Hopper_ROS
|
1e6354109f034a7d1d41a5b39ddcb632cfee64b2
|
[
"MIT"
] | null | null | null |
hopper_controller/src/hexapod/folding_manager.py
|
CreedyNZ/Hopper_ROS
|
1e6354109f034a7d1d41a5b39ddcb632cfee64b2
|
[
"MIT"
] | 7
|
2019-08-11T20:31:27.000Z
|
2021-09-19T04:34:18.000Z
|
import rospy
MOVE_CYCLE_PERIOD = 0.01
def move_towards(target, current, step=1):
if abs(target-current) < step:
return target, True
else:
if target > current:
return current + step, False
else:
return current - step, False
def move_leg(leg, coxa=None, femur=None, tibia=None, step=1.3):
coxa_done = True
femur_done = True
tibia_done = True
if coxa:
leg.coxa, coxa_done = move_towards(coxa, leg.coxa, step)
if femur:
leg.femur, femur_done = move_towards(femur, leg.femur, step)
if tibia:
leg.tibia, tibia_done = move_towards(tibia, leg.tibia, step)
return coxa_done and femur_done and tibia_done
def is_leg_close(leg, coxa=None, femur=None, tibia=None, tolerance=20):
coxa_close = True
femur_close = True
tibia_close = True
if coxa:
coxa_close = leg.coxa + tolerance > coxa > leg.coxa - tolerance
if femur:
femur_close = leg.femur + tolerance > femur > leg.femur - tolerance
if tibia:
tibia_close = leg.tibia + tolerance > tibia > leg.tibia - tolerance
return coxa_close and femur_close and tibia_close
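# Hedged usage sketch (illustrative only; not part of the original controller): each
# call to move_towards() nudges a joint value by at most `step` toward its target and
# reports completion, and move_leg() applies that to coxa/femur/tibia together, so the
# fold/unfold loops below simply repeat move_leg() once per MOVE_CYCLE_PERIOD tick
# until it returns True. _DemoLeg is a hypothetical stand-in for the motor-position
# objects that FoldingManager reads from the body controller.
class _DemoLeg(object):
    def __init__(self, coxa, femur, tibia):
        self.coxa, self.femur, self.tibia = coxa, femur, tibia

def _demo_ramp():
    # Ramp a demo leg from all-100 joints towards (coxa=150, femur=60, tibia=240).
    leg = _DemoLeg(100.0, 100.0, 100.0)
    ticks = 0
    while not move_leg(leg, coxa=150, femur=60, tibia=240):
        ticks += 1  # one MOVE_CYCLE_PERIOD sleep per iteration in the real loops
    return ticks, (leg.coxa, leg.femur, leg.tibia)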
class FoldingManager(object):
def __init__(self, body_controller):
super(FoldingManager, self).__init__()
self.body_controller = body_controller
self.last_motor_position = None
def position_femur_tibia(self):
current_position = self.body_controller.read_hexapod_motor_positions()
self.last_motor_position = current_position
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lf = move_leg(self.last_motor_position.left_front, None, 60, 240)
lm = move_leg(self.last_motor_position.left_middle, None, 60, 240)
lr = move_leg(self.last_motor_position.left_rear, None, 60, 240)
rf = move_leg(self.last_motor_position.right_front, None, 240, 60)
rm = move_leg(self.last_motor_position.right_middle, None, 240, 60)
rr = move_leg(self.last_motor_position.right_rear, None, 240, 60)
self.body_controller.set_motors(self.last_motor_position)
if lf and lm and lr and rf and rm and rr:
break
rospy.sleep(0.05)
def check_if_folded(self):
current_position = self.body_controller.read_hexapod_motor_positions()
self.last_motor_position = current_position
lf = is_leg_close(self.last_motor_position.left_front, 240)
lm = is_leg_close(self.last_motor_position.left_middle, 240) or is_leg_close(self.last_motor_position.left_middle, 60)
lr = is_leg_close(self.last_motor_position.left_rear, 60)
rf = is_leg_close(self.last_motor_position.right_front, 60)
rm = is_leg_close(self.last_motor_position.right_middle, 60) or is_leg_close(self.last_motor_position.right_middle, 240)
rr = is_leg_close(self.last_motor_position.right_rear, 240)
return lf and lm and lr and rf and rm and rr
def unfold(self):
self.position_femur_tibia()
current_position = self.body_controller.read_hexapod_motor_positions()
self.last_motor_position = current_position
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lf = False
lr = False
rf = False
rr = False
if self.last_motor_position.left_middle.coxa > 120:
lf = move_leg(self.last_motor_position.left_front, 150)
lm = move_leg(self.last_motor_position.left_middle, 150)
if self.last_motor_position.left_middle.coxa < 180:
lr = move_leg(self.last_motor_position.left_rear, 150)
if self.last_motor_position.right_middle.coxa < 180:
rf = move_leg(self.last_motor_position.right_front, 150)
rm = move_leg(self.last_motor_position.right_middle, 150)
if self.last_motor_position.right_middle.coxa > 120:
rr = move_leg(self.last_motor_position.right_rear, 150)
self.body_controller.set_motors(self.last_motor_position)
if lf and lm and lr and rf and rm and rr:
break
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lf = move_leg(self.last_motor_position.left_front, tibia=210)
lm = move_leg(self.last_motor_position.left_middle, tibia=210)
lr = move_leg(self.last_motor_position.left_rear, tibia=210)
rf = move_leg(self.last_motor_position.right_front, tibia=90)
rm = move_leg(self.last_motor_position.right_middle, tibia=90)
rr = move_leg(self.last_motor_position.right_rear, tibia=90)
self.body_controller.set_motors(self.last_motor_position)
if lf and lm and lr and rf and rm and rr:
break
rospy.sleep(0.2)
self.body_controller.set_torque(False)
def fold(self):
self.position_femur_tibia()
current_position = self.body_controller.read_hexapod_motor_positions()
self.last_motor_position = current_position
if not self.check_if_folded():
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, 150)
rm = move_leg(self.last_motor_position.right_middle, 150)
self.body_controller.set_motors(self.last_motor_position)
if lm and rm:
break
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lf = move_leg(self.last_motor_position.left_front, 240)
lr = move_leg(self.last_motor_position.left_rear, 60)
rf = move_leg(self.last_motor_position.right_front, 60)
rr = move_leg(self.last_motor_position.right_rear, 240)
self.body_controller.set_motors(self.last_motor_position)
if lf and lr and rf and rr:
break
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, 240)
rm = move_leg(self.last_motor_position.right_middle, 60)
self.body_controller.set_motors(self.last_motor_position)
if lm and rm:
break
rospy.sleep(0.2)
self.body_controller.set_torque(False)
def unfold_on_ground(self):
self.position_femur_tibia()
current_position = self.body_controller.read_hexapod_motor_positions()
self.last_motor_position = current_position
# lift middle legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, tibia=200)
rm = move_leg(self.last_motor_position.right_middle, tibia=100)
self.body_controller.set_motors(self.last_motor_position)
if lm and rm:
break
# fold out middle legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, coxa=150)
rm = move_leg(self.last_motor_position.right_middle, coxa=150)
self.body_controller.set_motors(self.last_motor_position)
if lm and rm:
break
# lower right leg
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
rm = move_leg(self.last_motor_position.right_middle, femur=170, tibia=100)
self.body_controller.set_motors(self.last_motor_position)
if rm:
break
# unfold right legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
rf = move_leg(self.last_motor_position.right_front, coxa=150)
rr = move_leg(self.last_motor_position.right_rear, coxa=150)
self.body_controller.set_motors(self.last_motor_position)
if rf and rr:
break
# lift right legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
rf = move_leg(self.last_motor_position.right_front, tibia=90)
rr = move_leg(self.last_motor_position.right_rear, tibia=90)
self.body_controller.set_motors(self.last_motor_position)
if rf and rr:
break
# switch lifted side
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, femur=130, tibia=200)
rm = move_leg(self.last_motor_position.right_middle, femur=240, tibia=90)
self.body_controller.set_motors(self.last_motor_position)
if rm and lm:
break
# unfold left legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lf = move_leg(self.last_motor_position.left_front, coxa=150)
lr = move_leg(self.last_motor_position.left_rear, coxa=150)
self.body_controller.set_motors(self.last_motor_position)
if lf and lr:
break
# lift left legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lf = move_leg(self.last_motor_position.left_front, tibia=210)
lr = move_leg(self.last_motor_position.left_rear, tibia=210)
self.body_controller.set_motors(self.last_motor_position)
if lf and lr:
break
# lift middle left
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, femur=60, tibia=210)
self.body_controller.set_motors(self.last_motor_position)
if lm:
break
rospy.sleep(0.2)
self.body_controller.set_torque(False)
def fold_on_ground(self):
current_position = self.body_controller.read_hexapod_motor_positions()
self.last_motor_position = current_position
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lf = move_leg(self.last_motor_position.left_front, 150, femur=60, tibia=210)
lm = move_leg(self.last_motor_position.left_middle, 150, femur=60, tibia=210)
lr = move_leg(self.last_motor_position.left_rear, 150, femur=60, tibia=210)
rf = move_leg(self.last_motor_position.right_front, 150, femur=240, tibia=90)
rm = move_leg(self.last_motor_position.right_middle, 150, femur=240, tibia=90)
rr = move_leg(self.last_motor_position.right_rear, 150, femur=240, tibia=90)
self.body_controller.set_motors(self.last_motor_position)
if lf and lm and lr and rf and rm and rr:
break
# lower right leg
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
rm = move_leg(self.last_motor_position.right_middle, femur=170, tibia=100)
self.body_controller.set_motors(self.last_motor_position)
if rm:
break
# compress right legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
rf = move_leg(self.last_motor_position.right_front, None, 240, 60)
rr = move_leg(self.last_motor_position.right_rear, None, 240, 60)
self.body_controller.set_motors(self.last_motor_position)
if rf and rr:
break
# fold right legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
rf = move_leg(self.last_motor_position.right_front, 60)
rr = move_leg(self.last_motor_position.right_rear, 240)
self.body_controller.set_motors(self.last_motor_position)
if rf and rr:
break
# switch lifted side
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, femur=130, tibia=200)
rm = move_leg(self.last_motor_position.right_middle, femur=240, tibia=90)
self.body_controller.set_motors(self.last_motor_position)
if rm and lm:
break
# compress left legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lf = move_leg(self.last_motor_position.left_front, None, 60, 240)
lr = move_leg(self.last_motor_position.left_rear, None, 60, 240)
self.body_controller.set_motors(self.last_motor_position)
if lf and lr:
break
# fold left legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lf = move_leg(self.last_motor_position.left_front, 240)
lr = move_leg(self.last_motor_position.left_rear, 60)
self.body_controller.set_motors(self.last_motor_position)
if lf and lr:
break
# lift left middle leg
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, femur=60, tibia=210)
self.body_controller.set_motors(self.last_motor_position)
if lm:
break
# fold middle legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, 230)
rm = move_leg(self.last_motor_position.right_middle, 70)
self.body_controller.set_motors(self.last_motor_position)
if lm and rm:
break
# compress middle legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, None, 60, 240)
rm = move_leg(self.last_motor_position.right_middle, None, 240, 60)
self.body_controller.set_motors(self.last_motor_position)
if lm and rm:
break
rospy.sleep(0.2)
self.body_controller.set_torque(False)
| 44.824104
| 128
| 0.637599
| 1,849
| 13,761
| 4.436993
| 0.050297
| 0.105314
| 0.171136
| 0.276451
| 0.870673
| 0.865553
| 0.865553
| 0.854705
| 0.826548
| 0.783642
| 0
| 0.03118
| 0.286825
| 13,761
| 306
| 129
| 44.970588
| 0.804769
| 0.023181
| 0
| 0.716912
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036765
| false
| 0
| 0.003676
| 0
| 0.066176
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b8014951415d289b10583d9f4dc51aea80536fbd
| 4,905
|
py
|
Python
|
ksteta3pi/Consideredbkg/MC_12_11134011_MagUp.py
|
Williams224/davinci-scripts
|
730642d2ff13543eca4073a4ce0932631195de56
|
[
"MIT"
] | null | null | null |
ksteta3pi/Consideredbkg/MC_12_11134011_MagUp.py
|
Williams224/davinci-scripts
|
730642d2ff13543eca4073a4ce0932631195de56
|
[
"MIT"
] | null | null | null |
ksteta3pi/Consideredbkg/MC_12_11134011_MagUp.py
|
Williams224/davinci-scripts
|
730642d2ff13543eca4073a4ce0932631195de56
|
[
"MIT"
] | null | null | null |
#-- GAUDI jobOptions generated on Mon Jul 20 10:20:49 2015
#-- Contains event types :
#-- 11134011 - 42 files - 900254 events - 251.92 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass Step-125836
#-- StepId : 125836
#-- StepName : Stripping20-NoPrescalingFlagged for Sim08 - Implicit merging.
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v32r2p1
#-- OptionFiles : $APPCONFIGOPTS/DaVinci/DV-Stripping20-Stripping-MC-NoPrescaling.py;$APPCONFIGOPTS/DaVinci/DataType-2012.py;$APPCONFIGOPTS/DaVinci/InputType-DST.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-127969
#-- StepId : 127969
#-- StepName : Reco14c for MC - 2012
#-- ApplicationName : Brunel
#-- ApplicationVersion : v43r2p11
#-- OptionFiles : $APPCONFIGOPTS/Brunel/DataType-2012.py;$APPCONFIGOPTS/Brunel/MC-WithTruth.py;$APPCONFIGOPTS/Persistency/DST-multipleTCK-2012.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r218
#-- Visible : Y
from Gaudi.Configuration import *
from GaudiConf import IOHelper
IOHelper('ROOT').inputFiles(['LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000001_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000002_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000003_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000004_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000005_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000006_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000007_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000008_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000009_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000010_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000011_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000012_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000013_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000014_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000015_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000016_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000017_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000018_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000019_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000020_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000021_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000022_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000023_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000024_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000025_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000026_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000027_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000029_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000030_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000031_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000032_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000033_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000034_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000035_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000036_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000037_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000038_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000039_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000040_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000041_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000042_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00046297/0000/00046297_00000043_2.AllStreams.dst'
], clear=True)
| 62.088608
| 215
| 0.798777
| 686
| 4,905
| 5.588921
| 0.189504
| 0.28482
| 0.098592
| 0.14241
| 0.709963
| 0.709963
| 0.709963
| 0.709963
| 0.709963
| 0.697966
| 0
| 0.317793
| 0.046687
| 4,905
| 78
| 216
| 62.884615
| 0.502139
| 0.238124
| 0
| 0
| 1
| 0.933333
| 0.917565
| 0.916487
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.044444
| 0
| 0.044444
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
b80bd1236784afca06c2fdaedb154f5764c38921
| 258
|
py
|
Python
|
henrietta/tests/__init__.py
|
zkbt/henrietta
|
653d798b241ad5591b704967a0413a2457a4e734
|
[
"MIT"
] | null | null | null |
henrietta/tests/__init__.py
|
zkbt/henrietta
|
653d798b241ad5591b704967a0413a2457a4e734
|
[
"MIT"
] | 12
|
2018-09-12T03:56:04.000Z
|
2019-02-15T04:12:53.000Z
|
henrietta/tests/__init__.py
|
zkbt/henrietta
|
653d798b241ad5591b704967a0413a2457a4e734
|
[
"MIT"
] | null | null | null |
from .test_lightcurves import *
from .test_statistics import *
from .test_models import *
from .test_fitting import *
from .test_tools import *
from .test_photometry import *
from .test_tpf import *
from .test_imaging import *
| 25.8
| 31
| 0.790698
| 36
| 258
| 5.416667
| 0.305556
| 0.369231
| 0.574359
| 0.246154
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 258
| 9
| 32
| 28.666667
| 0.878378
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
62dc5a004b7115829f44a8eadc00ed4081475f1f
| 161
|
py
|
Python
|
src/libs/django/utils/request.py
|
antiline/jun2
|
00928cea1f4b8cd6634cf9a1ae6dc19c95d0e54c
|
[
"MIT"
] | null | null | null |
src/libs/django/utils/request.py
|
antiline/jun2
|
00928cea1f4b8cd6634cf9a1ae6dc19c95d0e54c
|
[
"MIT"
] | 17
|
2019-06-24T14:11:49.000Z
|
2021-06-04T22:19:59.000Z
|
src/libs/django/utils/request.py
|
tabetaku/roots
|
8a9f91b8b0e0b64a85db2898a537b12be65de753
|
[
"MIT"
] | null | null | null |
from ipware.ip import get_ip
from ipware.utils import is_private_ip
def is_private_ip_from_request(request) -> bool:
return is_private_ip(get_ip(request))
| 23
| 48
| 0.807453
| 28
| 161
| 4.285714
| 0.428571
| 0.225
| 0.275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124224
| 161
| 6
| 49
| 26.833333
| 0.851064
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
1a0ee9a3148043007875afdc8ae0b227516a59d4
| 131,586
|
py
|
Python
|
pybind/slxos/v17r_2_00/mpls_state/lsp/secondary_path/__init__.py
|
extremenetworks/pybind
|
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v17r_2_00/mpls_state/lsp/secondary_path/__init__.py
|
extremenetworks/pybind
|
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v17r_2_00/mpls_state/lsp/secondary_path/__init__.py
|
extremenetworks/pybind
|
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import admin_group_lists
import auto_bandwidth
class secondary_path(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-mpls-operational - based on the path /mpls-state/lsp/secondary-path. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: MPLS LSP secondary path information
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__path_name','__instance_id','__path_up','__active','__committed','__is_new_instance','__is_current_secondary','__is_selected_secondary','__config_adaptive','__config_reoptimize_timer_configured','__config_reoptimize_timer','__config_reoptimize_timer_count','__config_tspec_mtu_configured','__config_tspec_mtu','__config_cos_configured','__config_cos','__config_mtu_configured','__config_mtu','__config_tie_breaking_configured','__config_tie_break_random','__config_tie_break_least_fill','__config_tie_break_most_fill','__config_cspf_disabled','__config_rro_disabled','__config_hot_standby','__config_pinned','__config_persistent','__config_soft_prempt','__config_priority_configured','__config_setup_prority','__config_holding_prority','__config_hop_limit_configured','__config_hop_limit','__config_traffic_eng_rate_configured','__config_traffic_eng_mean_rate','__config_traffic_eng_max_rate','__config_traffic_eng_max_burst','__config_abw_configured','__config_cspf_computation_mode','__config_admin_group_configured','__admin_group_lists','__auto_bandwidth',)
_yang_name = 'secondary-path'
_rest_name = 'secondary-path'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__path_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="path-name", rest_name="path-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
self.__is_selected_secondary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-selected-secondary", rest_name="is-selected-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_cos = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-cos", rest_name="config-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__config_traffic_eng_max_rate = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-rate", rest_name="config-traffic-eng-max-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__config_cos_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-cos-configured", rest_name="config-cos-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_traffic_eng_max_burst = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-burst", rest_name="config-traffic-eng-max-burst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__config_soft_prempt = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-soft-prempt", rest_name="config-soft-prempt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__committed = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="committed", rest_name="committed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_holding_prority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-holding-prority", rest_name="config-holding-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__instance_id = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="instance-id", rest_name="instance-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_cspf_disabled = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-cspf-disabled", rest_name="config-cspf-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_hop_limit_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-hop-limit-configured", rest_name="config-hop-limit-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_pinned = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-pinned", rest_name="config-pinned", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_mtu_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-mtu-configured", rest_name="config-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_persistent = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-persistent", rest_name="config-persistent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_reoptimize_timer_count = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer-count", rest_name="config-reoptimize-timer-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__auto_bandwidth = YANGDynClass(base=auto_bandwidth.auto_bandwidth, is_container='container', presence=False, yang_name="auto-bandwidth", rest_name="auto-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-auto-bandwidth-config-auto-bandwidth-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)
self.__is_new_instance = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-new-instance", rest_name="is-new-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_tie_break_least_fill = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-least-fill", rest_name="config-tie-break-least-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_cspf_computation_mode = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'cspf-computation-mode-default': {'value': 1}, u'cspf-computation-mode-use-bypass-metric': {'value': 2}, u'cspf-computation-mode-use-igp-metric-global': {'value': 7}, u'cspf-computation-mode-use-igp-metric': {'value': 5}, u'cspf-computation-mode-use-te-metric': {'value': 4}, u'cspf-computation-mode-use-bypass-liberal': {'value': 3}, u'cspf-computation-mode-use-te-metric-global': {'value': 6}},), is_leaf=True, yang_name="config-cspf-computation-mode", rest_name="config-cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='lsp-cspf-computation-mode', is_config=False)
self.__config_reoptimize_timer = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer", rest_name="config-reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__config_traffic_eng_rate_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-traffic-eng-rate-configured", rest_name="config-traffic-eng-rate-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_tie_breaking_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-breaking-configured", rest_name="config-tie-breaking-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_mtu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-mtu", rest_name="config-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__admin_group_lists = YANGDynClass(base=YANGListType("list_type",admin_group_lists.admin_group_lists, yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='list-type', extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}), is_container='list', yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)
self.__active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", rest_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_traffic_eng_mean_rate = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-mean-rate", rest_name="config-traffic-eng-mean-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__config_hot_standby = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-hot-standby", rest_name="config-hot-standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_reoptimize_timer_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-reoptimize-timer-configured", rest_name="config-reoptimize-timer-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_adaptive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-adaptive", rest_name="config-adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_priority_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-priority-configured", rest_name="config-priority-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_tspec_mtu_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tspec-mtu-configured", rest_name="config-tspec-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_tie_break_most_fill = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-most-fill", rest_name="config-tie-break-most-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__path_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="path-up", rest_name="path-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_setup_prority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-setup-prority", rest_name="config-setup-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__config_abw_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-abw-configured", rest_name="config-abw-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_rro_disabled = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-rro-disabled", rest_name="config-rro-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_admin_group_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-admin-group-configured", rest_name="config-admin-group-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_hop_limit = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-hop-limit", rest_name="config-hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
self.__is_current_secondary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-current-secondary", rest_name="is-current-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_tie_break_random = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-random", rest_name="config-tie-break-random", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__config_tspec_mtu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-tspec-mtu", rest_name="config-tspec-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
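  # NOTE (editorial, illustrative only -- not emitted by pyangbind): the block
  # above implements a copy-constructor pattern.  A single positional argument
  # is accepted; it must expose every element in self._pyangbind_elements, and
  # each leaf whose _changed() is True is copied across through the matching
  # _set_<name>() method, propagating the optional load= keyword.  A hedged
  # sketch, assuming this class is instantiable as secondary_path and that
  # "other" is a previously populated instance (both names hypothetical):
  #
  #   copy = secondary_path(other)             # copies only changed leaves
  #   copy = secondary_path(other, load=True)  # passes load=True to each setter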
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'mpls-state', u'lsp', u'secondary-path']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'mpls-state', u'lsp', u'secondary-path']
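  # NOTE (editorial, illustrative only): _path() and _rest_path() return the
  # element's YANG and REST path components as lists.  When the object has a
  # _parent, the parent's path is extended; otherwise the static defaults above
  # are returned.  A hedged sketch of the expected shape, where "sec_path" is a
  # hypothetical instance used only for illustration:
  #
  #   sec_path._path()       # -> [u'mpls-state', u'lsp', u'secondary-path'] when unparented
  #   "/".join(sec_path._rest_path())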
def _get_path_name(self):
"""
Getter method for path_name, mapped from YANG variable /mpls_state/lsp/secondary_path/path_name (string)
YANG Description: lsp_sec_path_path_name
"""
return self.__path_name
def _set_path_name(self, v, load=False):
"""
Setter method for path_name, mapped from YANG variable /mpls_state/lsp/secondary_path/path_name (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_path_name is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_path_name() directly.
YANG Description: lsp_sec_path_path_name
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="path-name", rest_name="path-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """path_name must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="path-name", rest_name="path-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)""",
})
self.__path_name = t
if hasattr(self, '_set'):
self._set()
def _unset_path_name(self):
self.__path_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="path-name", rest_name="path-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
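  # NOTE (editorial, illustrative only): path-name is a key leaf for this entry
  # (is_keyval=True above), so _set_path_name() refuses direct writes once the
  # object sits inside an instantiated list unless load=True is passed.  A
  # hedged sketch of how a backend might populate it; "sec_path" and the value
  # u"sec-path-1" are hypothetical examples:
  #
  #   sec_path._set_path_name(u"sec-path-1", load=True)  # bypasses the key guard
  #   sec_path._set_path_name(u"sec-path-1")             # AttributeError if parented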
def _get_instance_id(self):
"""
Getter method for instance_id, mapped from YANG variable /mpls_state/lsp/secondary_path/instance_id (boolean)
YANG Description: lsp_sec_path_instance_id
"""
return self.__instance_id
def _set_instance_id(self, v, load=False):
"""
Setter method for instance_id, mapped from YANG variable /mpls_state/lsp/secondary_path/instance_id (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_instance_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_instance_id() directly.
YANG Description: lsp_sec_path_instance_id
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="instance-id", rest_name="instance-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """instance_id must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="instance-id", rest_name="instance-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__instance_id = t
if hasattr(self, '_set'):
self._set()
def _unset_instance_id(self):
self.__instance_id = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="instance-id", rest_name="instance-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_path_up(self):
"""
Getter method for path_up, mapped from YANG variable /mpls_state/lsp/secondary_path/path_up (boolean)
YANG Description: lsp_sec_path_up
"""
return self.__path_up
def _set_path_up(self, v, load=False):
"""
Setter method for path_up, mapped from YANG variable /mpls_state/lsp/secondary_path/path_up (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_path_up is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_path_up() directly.
YANG Description: lsp_sec_path_up
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="path-up", rest_name="path-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """path_up must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="path-up", rest_name="path-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__path_up = t
if hasattr(self, '_set'):
self._set()
def _unset_path_up(self):
self.__path_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="path-up", rest_name="path-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_active(self):
"""
Getter method for active, mapped from YANG variable /mpls_state/lsp/secondary_path/active (boolean)
YANG Description: lsp_sec_path_active
"""
return self.__active
def _set_active(self, v, load=False):
"""
Setter method for active, mapped from YANG variable /mpls_state/lsp/secondary_path/active (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_active is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_active() directly.
YANG Description: lsp_sec_path_active
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="active", rest_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """active must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", rest_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__active = t
if hasattr(self, '_set'):
self._set()
def _unset_active(self):
self.__active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", rest_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_committed(self):
"""
Getter method for committed, mapped from YANG variable /mpls_state/lsp/secondary_path/committed (boolean)
YANG Description: lsp_sec_path_committed
"""
return self.__committed
def _set_committed(self, v, load=False):
"""
Setter method for committed, mapped from YANG variable /mpls_state/lsp/secondary_path/committed (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_committed is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_committed() directly.
YANG Description: lsp_sec_path_committed
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="committed", rest_name="committed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """committed must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="committed", rest_name="committed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__committed = t
if hasattr(self, '_set'):
self._set()
def _unset_committed(self):
self.__committed = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="committed", rest_name="committed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_is_new_instance(self):
"""
Getter method for is_new_instance, mapped from YANG variable /mpls_state/lsp/secondary_path/is_new_instance (boolean)
YANG Description: lsp_sec_path_is_new_instance
"""
return self.__is_new_instance
def _set_is_new_instance(self, v, load=False):
"""
Setter method for is_new_instance, mapped from YANG variable /mpls_state/lsp/secondary_path/is_new_instance (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_is_new_instance is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_is_new_instance() directly.
YANG Description: lsp_sec_path_is_new_instance
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="is-new-instance", rest_name="is-new-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """is_new_instance must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-new-instance", rest_name="is-new-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__is_new_instance = t
if hasattr(self, '_set'):
self._set()
def _unset_is_new_instance(self):
self.__is_new_instance = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-new-instance", rest_name="is-new-instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_is_current_secondary(self):
"""
Getter method for is_current_secondary, mapped from YANG variable /mpls_state/lsp/secondary_path/is_current_secondary (boolean)
YANG Description: lsp_sec_path_is_current_secondary
"""
return self.__is_current_secondary
def _set_is_current_secondary(self, v, load=False):
"""
Setter method for is_current_secondary, mapped from YANG variable /mpls_state/lsp/secondary_path/is_current_secondary (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_is_current_secondary is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_is_current_secondary() directly.
YANG Description: lsp_sec_path_is_current_secondary
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="is-current-secondary", rest_name="is-current-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """is_current_secondary must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-current-secondary", rest_name="is-current-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__is_current_secondary = t
if hasattr(self, '_set'):
self._set()
def _unset_is_current_secondary(self):
self.__is_current_secondary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-current-secondary", rest_name="is-current-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_is_selected_secondary(self):
"""
Getter method for is_selected_secondary, mapped from YANG variable /mpls_state/lsp/secondary_path/is_selected_secondary (boolean)
YANG Description: lsp_sec_path_is_selected_secondary
"""
return self.__is_selected_secondary
def _set_is_selected_secondary(self, v, load=False):
"""
Setter method for is_selected_secondary, mapped from YANG variable /mpls_state/lsp/secondary_path/is_selected_secondary (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_is_selected_secondary is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_is_selected_secondary() directly.
YANG Description: lsp_sec_path_is_selected_secondary
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="is-selected-secondary", rest_name="is-selected-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """is_selected_secondary must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-selected-secondary", rest_name="is-selected-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__is_selected_secondary = t
if hasattr(self, '_set'):
self._set()
def _unset_is_selected_secondary(self):
self.__is_selected_secondary = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="is-selected-secondary", rest_name="is-selected-secondary", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_adaptive(self):
"""
Getter method for config_adaptive, mapped from YANG variable /mpls_state/lsp/secondary_path/config_adaptive (boolean)
YANG Description: lsp_sec_path_config_adaptive
"""
return self.__config_adaptive
def _set_config_adaptive(self, v, load=False):
"""
Setter method for config_adaptive, mapped from YANG variable /mpls_state/lsp/secondary_path/config_adaptive (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_adaptive is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_adaptive() directly.
YANG Description: lsp_sec_path_config_adaptive
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-adaptive", rest_name="config-adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_adaptive must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-adaptive", rest_name="config-adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_adaptive = t
if hasattr(self, '_set'):
self._set()
def _unset_config_adaptive(self):
self.__config_adaptive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-adaptive", rest_name="config-adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_reoptimize_timer_configured(self):
"""
Getter method for config_reoptimize_timer_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_reoptimize_timer_configured (boolean)
YANG Description: lsp_sec_path_config_reoptimize_timer_configured
"""
return self.__config_reoptimize_timer_configured
def _set_config_reoptimize_timer_configured(self, v, load=False):
"""
Setter method for config_reoptimize_timer_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_reoptimize_timer_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_reoptimize_timer_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_reoptimize_timer_configured() directly.
YANG Description: lsp_sec_path_config_reoptimize_timer_configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-reoptimize-timer-configured", rest_name="config-reoptimize-timer-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_reoptimize_timer_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-reoptimize-timer-configured", rest_name="config-reoptimize-timer-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_reoptimize_timer_configured = t
if hasattr(self, '_set'):
self._set()
def _unset_config_reoptimize_timer_configured(self):
self.__config_reoptimize_timer_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-reoptimize-timer-configured", rest_name="config-reoptimize-timer-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_reoptimize_timer(self):
"""
Getter method for config_reoptimize_timer, mapped from YANG variable /mpls_state/lsp/secondary_path/config_reoptimize_timer (uint32)
YANG Description: lsp_sec_path_config_reoptimize_timer
"""
return self.__config_reoptimize_timer
def _set_config_reoptimize_timer(self, v, load=False):
"""
Setter method for config_reoptimize_timer, mapped from YANG variable /mpls_state/lsp/secondary_path/config_reoptimize_timer (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_reoptimize_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_reoptimize_timer() directly.
YANG Description: lsp_sec_path_config_reoptimize_timer
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer", rest_name="config-reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_reoptimize_timer must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer", rest_name="config-reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
})
self.__config_reoptimize_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_config_reoptimize_timer(self):
self.__config_reoptimize_timer = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer", rest_name="config-reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_config_reoptimize_timer_count(self):
"""
Getter method for config_reoptimize_timer_count, mapped from YANG variable /mpls_state/lsp/secondary_path/config_reoptimize_timer_count (uint32)
YANG Description: lsp_sec_path_config_reoptimize_timer_count
"""
return self.__config_reoptimize_timer_count
def _set_config_reoptimize_timer_count(self, v, load=False):
"""
Setter method for config_reoptimize_timer_count, mapped from YANG variable /mpls_state/lsp/secondary_path/config_reoptimize_timer_count (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_reoptimize_timer_count is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_reoptimize_timer_count() directly.
YANG Description: lsp_sec_path_config_reoptimize_timer_count
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer-count", rest_name="config-reoptimize-timer-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_reoptimize_timer_count must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer-count", rest_name="config-reoptimize-timer-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
})
self.__config_reoptimize_timer_count = t
if hasattr(self, '_set'):
self._set()
def _unset_config_reoptimize_timer_count(self):
self.__config_reoptimize_timer_count = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-reoptimize-timer-count", rest_name="config-reoptimize-timer-count", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_config_tspec_mtu_configured(self):
"""
Getter method for config_tspec_mtu_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tspec_mtu_configured (boolean)
YANG Description: lsp_sec_path_config_tspec_mtu_configured
"""
return self.__config_tspec_mtu_configured
def _set_config_tspec_mtu_configured(self, v, load=False):
"""
Setter method for config_tspec_mtu_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tspec_mtu_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_tspec_mtu_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_tspec_mtu_configured() directly.
YANG Description: lsp_sec_path_config_tspec_mtu_configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-tspec-mtu-configured", rest_name="config-tspec-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_tspec_mtu_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tspec-mtu-configured", rest_name="config-tspec-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_tspec_mtu_configured = t
if hasattr(self, '_set'):
self._set()
def _unset_config_tspec_mtu_configured(self):
self.__config_tspec_mtu_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tspec-mtu-configured", rest_name="config-tspec-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_tspec_mtu(self):
"""
Getter method for config_tspec_mtu, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tspec_mtu (uint32)
YANG Description: lsp_sec_path_config_tspec_mtu
"""
return self.__config_tspec_mtu
def _set_config_tspec_mtu(self, v, load=False):
"""
Setter method for config_tspec_mtu, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tspec_mtu (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_tspec_mtu is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_tspec_mtu() directly.
YANG Description: lsp_sec_path_config_tspec_mtu
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-tspec-mtu", rest_name="config-tspec-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_tspec_mtu must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-tspec-mtu", rest_name="config-tspec-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
})
self.__config_tspec_mtu = t
if hasattr(self, '_set'):
self._set()
def _unset_config_tspec_mtu(self):
self.__config_tspec_mtu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-tspec-mtu", rest_name="config-tspec-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_config_cos_configured(self):
"""
Getter method for config_cos_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_cos_configured (boolean)
YANG Description: lsp_sec_path_config_cos_configured
"""
return self.__config_cos_configured
def _set_config_cos_configured(self, v, load=False):
"""
Setter method for config_cos_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_cos_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_cos_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_cos_configured() directly.
YANG Description: lsp_sec_path_config_cos_configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-cos-configured", rest_name="config-cos-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_cos_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-cos-configured", rest_name="config-cos-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_cos_configured = t
if hasattr(self, '_set'):
self._set()
def _unset_config_cos_configured(self):
self.__config_cos_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-cos-configured", rest_name="config-cos-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_cos(self):
"""
Getter method for config_cos, mapped from YANG variable /mpls_state/lsp/secondary_path/config_cos (uint32)
YANG Description: lsp_sec_path_config_cos
"""
return self.__config_cos
def _set_config_cos(self, v, load=False):
"""
Setter method for config_cos, mapped from YANG variable /mpls_state/lsp/secondary_path/config_cos (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_cos is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_cos() directly.
YANG Description: lsp_sec_path_config_cos
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-cos", rest_name="config-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_cos must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-cos", rest_name="config-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
})
self.__config_cos = t
if hasattr(self, '_set'):
self._set()
def _unset_config_cos(self):
self.__config_cos = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-cos", rest_name="config-cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_config_mtu_configured(self):
"""
Getter method for config_mtu_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_mtu_configured (boolean)
YANG Description: lsp_sec_path_config_mtu_configured
"""
return self.__config_mtu_configured
def _set_config_mtu_configured(self, v, load=False):
"""
Setter method for config_mtu_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_mtu_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_mtu_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_mtu_configured() directly.
YANG Description: lsp_sec_path_config_mtu_configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-mtu-configured", rest_name="config-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_mtu_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-mtu-configured", rest_name="config-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_mtu_configured = t
if hasattr(self, '_set'):
self._set()
def _unset_config_mtu_configured(self):
self.__config_mtu_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-mtu-configured", rest_name="config-mtu-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_mtu(self):
"""
Getter method for config_mtu, mapped from YANG variable /mpls_state/lsp/secondary_path/config_mtu (uint32)
YANG Description: lsp_sec_path_config_mtu
"""
return self.__config_mtu
def _set_config_mtu(self, v, load=False):
"""
Setter method for config_mtu, mapped from YANG variable /mpls_state/lsp/secondary_path/config_mtu (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_mtu is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_mtu() directly.
YANG Description: lsp_sec_path_config_mtu
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-mtu", rest_name="config-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_mtu must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-mtu", rest_name="config-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
})
self.__config_mtu = t
if hasattr(self, '_set'):
self._set()
def _unset_config_mtu(self):
self.__config_mtu = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-mtu", rest_name="config-mtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
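  # NOTE (editorial, illustrative only): uint32 leaves such as config-mtu are
  # wrapped in a RestrictedClassType with range 0..4294967295, so a value that
  # fails the restriction (or is not coercible to an integer) is expected to
  # make YANGDynClass raise, which the setter above converts into the
  # structured ValueError.  A hedged sketch; "sec_path" is a hypothetical
  # instance used only for illustration:
  #
  #   sec_path._set_config_mtu(1500)  # accepted, stored as the restricted type
  #   sec_path._set_config_mtu(-1)    # expected to raise ValueError (out of range)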
def _get_config_tie_breaking_configured(self):
"""
Getter method for config_tie_breaking_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_breaking_configured (boolean)
YANG Description: lsp_sec_path_config_tie_breaking_configured
"""
return self.__config_tie_breaking_configured
def _set_config_tie_breaking_configured(self, v, load=False):
"""
Setter method for config_tie_breaking_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_breaking_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_tie_breaking_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_tie_breaking_configured() directly.
YANG Description: lsp_sec_path_config_tie_breaking_configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-tie-breaking-configured", rest_name="config-tie-breaking-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_tie_breaking_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-breaking-configured", rest_name="config-tie-breaking-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_tie_breaking_configured = t
if hasattr(self, '_set'):
self._set()
def _unset_config_tie_breaking_configured(self):
self.__config_tie_breaking_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-breaking-configured", rest_name="config-tie-breaking-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_tie_break_random(self):
"""
Getter method for config_tie_break_random, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_break_random (boolean)
YANG Description: lsp_sec_path_config_tie_break_random
"""
return self.__config_tie_break_random
def _set_config_tie_break_random(self, v, load=False):
"""
Setter method for config_tie_break_random, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_break_random (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_tie_break_random is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_tie_break_random() directly.
YANG Description: lsp_sec_path_config_tie_break_random
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-tie-break-random", rest_name="config-tie-break-random", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_tie_break_random must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-random", rest_name="config-tie-break-random", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_tie_break_random = t
if hasattr(self, '_set'):
self._set()
def _unset_config_tie_break_random(self):
self.__config_tie_break_random = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-random", rest_name="config-tie-break-random", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_tie_break_least_fill(self):
"""
Getter method for config_tie_break_least_fill, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_break_least_fill (boolean)
YANG Description: lsp_sec_path_config_tie_break_least_fill
"""
return self.__config_tie_break_least_fill
def _set_config_tie_break_least_fill(self, v, load=False):
"""
Setter method for config_tie_break_least_fill, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_break_least_fill (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_tie_break_least_fill is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_tie_break_least_fill() directly.
YANG Description: lsp_sec_path_config_tie_break_least_fill
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-tie-break-least-fill", rest_name="config-tie-break-least-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_tie_break_least_fill must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-least-fill", rest_name="config-tie-break-least-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_tie_break_least_fill = t
if hasattr(self, '_set'):
self._set()
def _unset_config_tie_break_least_fill(self):
self.__config_tie_break_least_fill = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-least-fill", rest_name="config-tie-break-least-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_tie_break_most_fill(self):
"""
Getter method for config_tie_break_most_fill, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_break_most_fill (boolean)
YANG Description: lsp_sec_path_config_tie_break_most_fill
"""
return self.__config_tie_break_most_fill
def _set_config_tie_break_most_fill(self, v, load=False):
"""
Setter method for config_tie_break_most_fill, mapped from YANG variable /mpls_state/lsp/secondary_path/config_tie_break_most_fill (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_tie_break_most_fill is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_tie_break_most_fill() directly.
YANG Description: lsp_sec_path_config_tie_break_most_fill
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-tie-break-most-fill", rest_name="config-tie-break-most-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_tie_break_most_fill must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-most-fill", rest_name="config-tie-break-most-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_tie_break_most_fill = t
if hasattr(self, '_set'):
self._set()
def _unset_config_tie_break_most_fill(self):
self.__config_tie_break_most_fill = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-tie-break-most-fill", rest_name="config-tie-break-most-fill", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_cspf_disabled(self):
"""
Getter method for config_cspf_disabled, mapped from YANG variable /mpls_state/lsp/secondary_path/config_cspf_disabled (boolean)
YANG Description: lsp_sec_path_config_cspf_disabled
"""
return self.__config_cspf_disabled
def _set_config_cspf_disabled(self, v, load=False):
"""
Setter method for config_cspf_disabled, mapped from YANG variable /mpls_state/lsp/secondary_path/config_cspf_disabled (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_cspf_disabled is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_cspf_disabled() directly.
YANG Description: lsp_sec_path_config_cspf_disabled
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-cspf-disabled", rest_name="config-cspf-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_cspf_disabled must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-cspf-disabled", rest_name="config-cspf-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_cspf_disabled = t
if hasattr(self, '_set'):
self._set()
def _unset_config_cspf_disabled(self):
self.__config_cspf_disabled = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-cspf-disabled", rest_name="config-cspf-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_rro_disabled(self):
"""
Getter method for config_rro_disabled, mapped from YANG variable /mpls_state/lsp/secondary_path/config_rro_disabled (boolean)
YANG Description: lsp_sec_path_config_rro_disabled
"""
return self.__config_rro_disabled
def _set_config_rro_disabled(self, v, load=False):
"""
Setter method for config_rro_disabled, mapped from YANG variable /mpls_state/lsp/secondary_path/config_rro_disabled (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_rro_disabled is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_rro_disabled() directly.
YANG Description: lsp_sec_path_config_rro_disabled
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-rro-disabled", rest_name="config-rro-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_rro_disabled must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-rro-disabled", rest_name="config-rro-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_rro_disabled = t
if hasattr(self, '_set'):
self._set()
def _unset_config_rro_disabled(self):
self.__config_rro_disabled = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-rro-disabled", rest_name="config-rro-disabled", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_hot_standby(self):
"""
Getter method for config_hot_standby, mapped from YANG variable /mpls_state/lsp/secondary_path/config_hot_standby (boolean)
YANG Description: lsp_sec_path_config_hot_standby
"""
return self.__config_hot_standby
def _set_config_hot_standby(self, v, load=False):
"""
Setter method for config_hot_standby, mapped from YANG variable /mpls_state/lsp/secondary_path/config_hot_standby (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_hot_standby is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_hot_standby() directly.
YANG Description: lsp_sec_path_config_hot_standby
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-hot-standby", rest_name="config-hot-standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_hot_standby must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-hot-standby", rest_name="config-hot-standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_hot_standby = t
if hasattr(self, '_set'):
self._set()
def _unset_config_hot_standby(self):
self.__config_hot_standby = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-hot-standby", rest_name="config-hot-standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_pinned(self):
"""
Getter method for config_pinned, mapped from YANG variable /mpls_state/lsp/secondary_path/config_pinned (boolean)
YANG Description: lsp_sec_path_config_pinned
"""
return self.__config_pinned
def _set_config_pinned(self, v, load=False):
"""
Setter method for config_pinned, mapped from YANG variable /mpls_state/lsp/secondary_path/config_pinned (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_pinned is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_pinned() directly.
YANG Description: lsp_sec_path_config_pinned
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-pinned", rest_name="config-pinned", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_pinned must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-pinned", rest_name="config-pinned", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_pinned = t
if hasattr(self, '_set'):
self._set()
def _unset_config_pinned(self):
self.__config_pinned = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-pinned", rest_name="config-pinned", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_persistent(self):
"""
Getter method for config_persistent, mapped from YANG variable /mpls_state/lsp/secondary_path/config_persistent (boolean)
YANG Description: lsp_sec_path_config_persistent
"""
return self.__config_persistent
def _set_config_persistent(self, v, load=False):
"""
Setter method for config_persistent, mapped from YANG variable /mpls_state/lsp/secondary_path/config_persistent (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_persistent is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_persistent() directly.
YANG Description: lsp_sec_path_config_persistent
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-persistent", rest_name="config-persistent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_persistent must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-persistent", rest_name="config-persistent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_persistent = t
if hasattr(self, '_set'):
self._set()
def _unset_config_persistent(self):
self.__config_persistent = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-persistent", rest_name="config-persistent", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_soft_prempt(self):
"""
Getter method for config_soft_prempt, mapped from YANG variable /mpls_state/lsp/secondary_path/config_soft_prempt (boolean)
YANG Description: lsp_sec_path_config_soft_prempt
"""
return self.__config_soft_prempt
def _set_config_soft_prempt(self, v, load=False):
"""
Setter method for config_soft_prempt, mapped from YANG variable /mpls_state/lsp/secondary_path/config_soft_prempt (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_soft_prempt is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_soft_prempt() directly.
YANG Description: lsp_sec_path_config_soft_prempt
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-soft-prempt", rest_name="config-soft-prempt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_soft_prempt must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-soft-prempt", rest_name="config-soft-prempt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_soft_prempt = t
if hasattr(self, '_set'):
self._set()
def _unset_config_soft_prempt(self):
self.__config_soft_prempt = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-soft-prempt", rest_name="config-soft-prempt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_priority_configured(self):
"""
Getter method for config_priority_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_priority_configured (boolean)
YANG Description: lsp_sec_path_config_priority_configured
"""
return self.__config_priority_configured
def _set_config_priority_configured(self, v, load=False):
"""
Setter method for config_priority_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_priority_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_priority_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_priority_configured() directly.
YANG Description: lsp_sec_path_config_priority_configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-priority-configured", rest_name="config-priority-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_priority_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-priority-configured", rest_name="config-priority-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_priority_configured = t
if hasattr(self, '_set'):
self._set()
def _unset_config_priority_configured(self):
self.__config_priority_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-priority-configured", rest_name="config-priority-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_setup_prority(self):
"""
Getter method for config_setup_prority, mapped from YANG variable /mpls_state/lsp/secondary_path/config_setup_prority (uint8)
YANG Description: lsp_sec_path_config_setup_prority
"""
return self.__config_setup_prority
def _set_config_setup_prority(self, v, load=False):
"""
Setter method for config_setup_prority, mapped from YANG variable /mpls_state/lsp/secondary_path/config_setup_prority (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_setup_prority is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_setup_prority() directly.
YANG Description: lsp_sec_path_config_setup_prority
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-setup-prority", rest_name="config-setup-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_setup_prority must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-setup-prority", rest_name="config-setup-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__config_setup_prority = t
if hasattr(self, '_set'):
self._set()
def _unset_config_setup_prority(self):
self.__config_setup_prority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-setup-prority", rest_name="config-setup-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
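# Hedged sketch for the uint8 leaves: they are wrapped in RestrictedClassType
# with range 0..255, so the try/except above turns out-of-range values into a
# ValueError. Reusing the illustrative sec_path instance assumed earlier:
#
#   sec_path._set_config_setup_prority(7)      # OK: within 0..255
#   sec_path._set_config_setup_prority(300)    # raises ValueError ("compatible with uint8")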
def _get_config_holding_prority(self):
"""
Getter method for config_holding_prority, mapped from YANG variable /mpls_state/lsp/secondary_path/config_holding_prority (uint8)
YANG Description: lsp_sec_path_config_holding_prority
"""
return self.__config_holding_prority
def _set_config_holding_prority(self, v, load=False):
"""
Setter method for config_holding_prority, mapped from YANG variable /mpls_state/lsp/secondary_path/config_holding_prority (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_holding_prority is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_holding_prority() directly.
YANG Description: lsp_sec_path_config_holding_prority
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-holding-prority", rest_name="config-holding-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_holding_prority must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-holding-prority", rest_name="config-holding-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__config_holding_prority = t
if hasattr(self, '_set'):
self._set()
def _unset_config_holding_prority(self):
self.__config_holding_prority = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-holding-prority", rest_name="config-holding-prority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_config_hop_limit_configured(self):
"""
Getter method for config_hop_limit_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_hop_limit_configured (boolean)
YANG Description: lsp_sec_path_config_hop_limit_configured
"""
return self.__config_hop_limit_configured
def _set_config_hop_limit_configured(self, v, load=False):
"""
Setter method for config_hop_limit_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_hop_limit_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_hop_limit_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_hop_limit_configured() directly.
YANG Description: lsp_sec_path_config_hop_limit_configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-hop-limit-configured", rest_name="config-hop-limit-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_hop_limit_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-hop-limit-configured", rest_name="config-hop-limit-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_hop_limit_configured = t
if hasattr(self, '_set'):
self._set()
def _unset_config_hop_limit_configured(self):
self.__config_hop_limit_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-hop-limit-configured", rest_name="config-hop-limit-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_hop_limit(self):
"""
Getter method for config_hop_limit, mapped from YANG variable /mpls_state/lsp/secondary_path/config_hop_limit (uint8)
YANG Description: lsp_sec_path_config_hop_limit
"""
return self.__config_hop_limit
def _set_config_hop_limit(self, v, load=False):
"""
Setter method for config_hop_limit, mapped from YANG variable /mpls_state/lsp/secondary_path/config_hop_limit (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_hop_limit is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_hop_limit() directly.
YANG Description: lsp_sec_path_config_hop_limit
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-hop-limit", rest_name="config-hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_hop_limit must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-hop-limit", rest_name="config-hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)""",
})
self.__config_hop_limit = t
if hasattr(self, '_set'):
self._set()
def _unset_config_hop_limit(self):
self.__config_hop_limit = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="config-hop-limit", rest_name="config-hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint8', is_config=False)
def _get_config_traffic_eng_rate_configured(self):
"""
Getter method for config_traffic_eng_rate_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_rate_configured (boolean)
YANG Description: lsp_sec_path_config_traffic_eng_rate_configured
"""
return self.__config_traffic_eng_rate_configured
def _set_config_traffic_eng_rate_configured(self, v, load=False):
"""
Setter method for config_traffic_eng_rate_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_rate_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_traffic_eng_rate_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_traffic_eng_rate_configured() directly.
YANG Description: lsp_sec_path_config_traffic_eng_rate_configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-traffic-eng-rate-configured", rest_name="config-traffic-eng-rate-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_traffic_eng_rate_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-traffic-eng-rate-configured", rest_name="config-traffic-eng-rate-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_traffic_eng_rate_configured = t
if hasattr(self, '_set'):
self._set()
def _unset_config_traffic_eng_rate_configured(self):
self.__config_traffic_eng_rate_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-traffic-eng-rate-configured", rest_name="config-traffic-eng-rate-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_traffic_eng_mean_rate(self):
"""
Getter method for config_traffic_eng_mean_rate, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_mean_rate (uint32)
YANG Description: lsp_sec_path_config_traffic_eng_mean_rate
"""
return self.__config_traffic_eng_mean_rate
def _set_config_traffic_eng_mean_rate(self, v, load=False):
"""
Setter method for config_traffic_eng_mean_rate, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_mean_rate (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_traffic_eng_mean_rate is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_traffic_eng_mean_rate() directly.
YANG Description: lsp_sec_path_config_traffic_eng_mean_rate
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-mean-rate", rest_name="config-traffic-eng-mean-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_traffic_eng_mean_rate must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-mean-rate", rest_name="config-traffic-eng-mean-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
})
self.__config_traffic_eng_mean_rate = t
if hasattr(self, '_set'):
self._set()
def _unset_config_traffic_eng_mean_rate(self):
self.__config_traffic_eng_mean_rate = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-mean-rate", rest_name="config-traffic-eng-mean-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
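# Note (hedged): the uint32 leaves use base_type=long because this module
# targets Python 2; the accepted range is 0..4294967295. For illustration,
# with the same assumed instance:
#
#   sec_path._set_config_traffic_eng_mean_rate(100000)   # any integer within the uint32 range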
def _get_config_traffic_eng_max_rate(self):
"""
Getter method for config_traffic_eng_max_rate, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_max_rate (uint32)
YANG Description: lsp_sec_path_config_traffic_eng_max_rate
"""
return self.__config_traffic_eng_max_rate
def _set_config_traffic_eng_max_rate(self, v, load=False):
"""
Setter method for config_traffic_eng_max_rate, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_max_rate (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_traffic_eng_max_rate is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_traffic_eng_max_rate() directly.
YANG Description: lsp_sec_path_config_traffic_eng_max_rate
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-rate", rest_name="config-traffic-eng-max-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_traffic_eng_max_rate must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-rate", rest_name="config-traffic-eng-max-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
})
self.__config_traffic_eng_max_rate = t
if hasattr(self, '_set'):
self._set()
def _unset_config_traffic_eng_max_rate(self):
self.__config_traffic_eng_max_rate = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-rate", rest_name="config-traffic-eng-max-rate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_config_traffic_eng_max_burst(self):
"""
Getter method for config_traffic_eng_max_burst, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_max_burst (uint32)
YANG Description: lsp_sec_path_config_traffic_eng_max_burst
"""
return self.__config_traffic_eng_max_burst
def _set_config_traffic_eng_max_burst(self, v, load=False):
"""
Setter method for config_traffic_eng_max_burst, mapped from YANG variable /mpls_state/lsp/secondary_path/config_traffic_eng_max_burst (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_traffic_eng_max_burst is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_traffic_eng_max_burst() directly.
YANG Description: lsp_sec_path_config_traffic_eng_max_burst
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-burst", rest_name="config-traffic-eng-max-burst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_traffic_eng_max_burst must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-burst", rest_name="config-traffic-eng-max-burst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
})
self.__config_traffic_eng_max_burst = t
if hasattr(self, '_set'):
self._set()
def _unset_config_traffic_eng_max_burst(self):
self.__config_traffic_eng_max_burst = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="config-traffic-eng-max-burst", rest_name="config-traffic-eng-max-burst", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_config_abw_configured(self):
"""
Getter method for config_abw_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_abw_configured (boolean)
YANG Description: lsp_sec_path_config_abw_configured
"""
return self.__config_abw_configured
def _set_config_abw_configured(self, v, load=False):
"""
Setter method for config_abw_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_abw_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_abw_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_abw_configured() directly.
YANG Description: lsp_sec_path_config_abw_configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-abw-configured", rest_name="config-abw-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_abw_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-abw-configured", rest_name="config-abw-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_abw_configured = t
if hasattr(self, '_set'):
self._set()
def _unset_config_abw_configured(self):
self.__config_abw_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-abw-configured", rest_name="config-abw-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_config_cspf_computation_mode(self):
"""
Getter method for config_cspf_computation_mode, mapped from YANG variable /mpls_state/lsp/secondary_path/config_cspf_computation_mode (lsp-cspf-computation-mode)
YANG Description: lsp sec path configured cspf computation mode
"""
return self.__config_cspf_computation_mode
def _set_config_cspf_computation_mode(self, v, load=False):
"""
Setter method for config_cspf_computation_mode, mapped from YANG variable /mpls_state/lsp/secondary_path/config_cspf_computation_mode (lsp-cspf-computation-mode)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_cspf_computation_mode is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_cspf_computation_mode() directly.
YANG Description: lsp sec path configured cspf computation mode
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'cspf-computation-mode-default': {'value': 1}, u'cspf-computation-mode-use-bypass-metric': {'value': 2}, u'cspf-computation-mode-use-igp-metric-global': {'value': 7}, u'cspf-computation-mode-use-igp-metric': {'value': 5}, u'cspf-computation-mode-use-te-metric': {'value': 4}, u'cspf-computation-mode-use-bypass-liberal': {'value': 3}, u'cspf-computation-mode-use-te-metric-global': {'value': 6}},), is_leaf=True, yang_name="config-cspf-computation-mode", rest_name="config-cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='lsp-cspf-computation-mode', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_cspf_computation_mode must be of a type compatible with lsp-cspf-computation-mode""",
'defined-type': "brocade-mpls-operational:lsp-cspf-computation-mode",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'cspf-computation-mode-default': {'value': 1}, u'cspf-computation-mode-use-bypass-metric': {'value': 2}, u'cspf-computation-mode-use-igp-metric-global': {'value': 7}, u'cspf-computation-mode-use-igp-metric': {'value': 5}, u'cspf-computation-mode-use-te-metric': {'value': 4}, u'cspf-computation-mode-use-bypass-liberal': {'value': 3}, u'cspf-computation-mode-use-te-metric-global': {'value': 6}},), is_leaf=True, yang_name="config-cspf-computation-mode", rest_name="config-cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='lsp-cspf-computation-mode', is_config=False)""",
})
self.__config_cspf_computation_mode = t
if hasattr(self, '_set'):
self._set()
def _unset_config_cspf_computation_mode(self):
self.__config_cspf_computation_mode = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'cspf-computation-mode-default': {'value': 1}, u'cspf-computation-mode-use-bypass-metric': {'value': 2}, u'cspf-computation-mode-use-igp-metric-global': {'value': 7}, u'cspf-computation-mode-use-igp-metric': {'value': 5}, u'cspf-computation-mode-use-te-metric': {'value': 4}, u'cspf-computation-mode-use-bypass-liberal': {'value': 3}, u'cspf-computation-mode-use-te-metric-global': {'value': 6}},), is_leaf=True, yang_name="config-cspf-computation-mode", rest_name="config-cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='lsp-cspf-computation-mode', is_config=False)
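# Hedged sketch: config-cspf-computation-mode is a dict_key-restricted string,
# so it is populated with one of the enumeration names listed in
# restriction_arg above rather than with the numeric 'value' entries:
#
#   sec_path._set_config_cspf_computation_mode('cspf-computation-mode-use-te-metric')
#   # an unknown string such as 'bogus-mode' would raise the ValueError above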
def _get_config_admin_group_configured(self):
"""
Getter method for config_admin_group_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_admin_group_configured (boolean)
YANG Description: lsp_sec_path_config_admin_group_configured
"""
return self.__config_admin_group_configured
def _set_config_admin_group_configured(self, v, load=False):
"""
Setter method for config_admin_group_configured, mapped from YANG variable /mpls_state/lsp/secondary_path/config_admin_group_configured (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_config_admin_group_configured is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config_admin_group_configured() directly.
YANG Description: lsp_sec_path_config_admin_group_configured
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="config-admin-group-configured", rest_name="config-admin-group-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config_admin_group_configured must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-admin-group-configured", rest_name="config-admin-group-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
})
self.__config_admin_group_configured = t
if hasattr(self, '_set'):
self._set()
def _unset_config_admin_group_configured(self):
self.__config_admin_group_configured = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="config-admin-group-configured", rest_name="config-admin-group-configured", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_admin_group_lists(self):
"""
Getter method for admin_group_lists, mapped from YANG variable /mpls_state/lsp/secondary_path/admin_group_lists (list)
"""
return self.__admin_group_lists
def _set_admin_group_lists(self, v, load=False):
"""
Setter method for admin_group_lists, mapped from YANG variable /mpls_state/lsp/secondary_path/admin_group_lists (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_admin_group_lists is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_admin_group_lists() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("list_type",admin_group_lists.admin_group_lists, yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='list-type', extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}), is_container='list', yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """admin_group_lists must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("list_type",admin_group_lists.admin_group_lists, yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='list-type', extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}), is_container='list', yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)""",
})
self.__admin_group_lists = t
if hasattr(self, '_set'):
self._set()
def _unset_admin_group_lists(self):
self.__admin_group_lists = YANGDynClass(base=YANGListType("list_type",admin_group_lists.admin_group_lists, yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='list-type', extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}), is_container='list', yang_name="admin-group-lists", rest_name="admin-group-lists", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-lsp-admin-group-list-admin-group-lists-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='list', is_config=False)
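# Hedged sketch: admin_group_lists is a pyangbind keyed list (YANGListType,
# keyed by 'list-type'). Keyed lists in pyangbind are normally populated with
# add(<key>), which returns the new entry; the key value below is an assumed
# placeholder, and the entry's child leaves are defined in
# admin_group_lists.admin_group_lists rather than in this file:
#
#   entry = sec_path.admin_group_lists.add(1)   # illustrative key only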
def _get_auto_bandwidth(self):
"""
Getter method for auto_bandwidth, mapped from YANG variable /mpls_state/lsp/secondary_path/auto_bandwidth (container)
"""
return self.__auto_bandwidth
def _set_auto_bandwidth(self, v, load=False):
"""
Setter method for auto_bandwidth, mapped from YANG variable /mpls_state/lsp/secondary_path/auto_bandwidth (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_auto_bandwidth is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_auto_bandwidth() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=auto_bandwidth.auto_bandwidth, is_container='container', presence=False, yang_name="auto-bandwidth", rest_name="auto-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-auto-bandwidth-config-auto-bandwidth-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """auto_bandwidth must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=auto_bandwidth.auto_bandwidth, is_container='container', presence=False, yang_name="auto-bandwidth", rest_name="auto-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-auto-bandwidth-config-auto-bandwidth-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)""",
})
self.__auto_bandwidth = t
if hasattr(self, '_set'):
self._set()
def _unset_auto_bandwidth(self):
self.__auto_bandwidth = YANGDynClass(base=auto_bandwidth.auto_bandwidth, is_container='container', presence=False, yang_name="auto-bandwidth", rest_name="auto-bandwidth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-auto-bandwidth-config-auto-bandwidth-2'}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)
path_name = __builtin__.property(_get_path_name)
instance_id = __builtin__.property(_get_instance_id)
path_up = __builtin__.property(_get_path_up)
active = __builtin__.property(_get_active)
committed = __builtin__.property(_get_committed)
is_new_instance = __builtin__.property(_get_is_new_instance)
is_current_secondary = __builtin__.property(_get_is_current_secondary)
is_selected_secondary = __builtin__.property(_get_is_selected_secondary)
config_adaptive = __builtin__.property(_get_config_adaptive)
config_reoptimize_timer_configured = __builtin__.property(_get_config_reoptimize_timer_configured)
config_reoptimize_timer = __builtin__.property(_get_config_reoptimize_timer)
config_reoptimize_timer_count = __builtin__.property(_get_config_reoptimize_timer_count)
config_tspec_mtu_configured = __builtin__.property(_get_config_tspec_mtu_configured)
config_tspec_mtu = __builtin__.property(_get_config_tspec_mtu)
config_cos_configured = __builtin__.property(_get_config_cos_configured)
config_cos = __builtin__.property(_get_config_cos)
config_mtu_configured = __builtin__.property(_get_config_mtu_configured)
config_mtu = __builtin__.property(_get_config_mtu)
config_tie_breaking_configured = __builtin__.property(_get_config_tie_breaking_configured)
config_tie_break_random = __builtin__.property(_get_config_tie_break_random)
config_tie_break_least_fill = __builtin__.property(_get_config_tie_break_least_fill)
config_tie_break_most_fill = __builtin__.property(_get_config_tie_break_most_fill)
config_cspf_disabled = __builtin__.property(_get_config_cspf_disabled)
config_rro_disabled = __builtin__.property(_get_config_rro_disabled)
config_hot_standby = __builtin__.property(_get_config_hot_standby)
config_pinned = __builtin__.property(_get_config_pinned)
config_persistent = __builtin__.property(_get_config_persistent)
config_soft_prempt = __builtin__.property(_get_config_soft_prempt)
config_priority_configured = __builtin__.property(_get_config_priority_configured)
config_setup_prority = __builtin__.property(_get_config_setup_prority)
config_holding_prority = __builtin__.property(_get_config_holding_prority)
config_hop_limit_configured = __builtin__.property(_get_config_hop_limit_configured)
config_hop_limit = __builtin__.property(_get_config_hop_limit)
config_traffic_eng_rate_configured = __builtin__.property(_get_config_traffic_eng_rate_configured)
config_traffic_eng_mean_rate = __builtin__.property(_get_config_traffic_eng_mean_rate)
config_traffic_eng_max_rate = __builtin__.property(_get_config_traffic_eng_max_rate)
config_traffic_eng_max_burst = __builtin__.property(_get_config_traffic_eng_max_burst)
config_abw_configured = __builtin__.property(_get_config_abw_configured)
config_cspf_computation_mode = __builtin__.property(_get_config_cspf_computation_mode)
config_admin_group_configured = __builtin__.property(_get_config_admin_group_configured)
admin_group_lists = __builtin__.property(_get_admin_group_lists)
auto_bandwidth = __builtin__.property(_get_auto_bandwidth)
_pyangbind_elements = {'path_name': path_name, 'instance_id': instance_id, 'path_up': path_up, 'active': active, 'committed': committed, 'is_new_instance': is_new_instance, 'is_current_secondary': is_current_secondary, 'is_selected_secondary': is_selected_secondary, 'config_adaptive': config_adaptive, 'config_reoptimize_timer_configured': config_reoptimize_timer_configured, 'config_reoptimize_timer': config_reoptimize_timer, 'config_reoptimize_timer_count': config_reoptimize_timer_count, 'config_tspec_mtu_configured': config_tspec_mtu_configured, 'config_tspec_mtu': config_tspec_mtu, 'config_cos_configured': config_cos_configured, 'config_cos': config_cos, 'config_mtu_configured': config_mtu_configured, 'config_mtu': config_mtu, 'config_tie_breaking_configured': config_tie_breaking_configured, 'config_tie_break_random': config_tie_break_random, 'config_tie_break_least_fill': config_tie_break_least_fill, 'config_tie_break_most_fill': config_tie_break_most_fill, 'config_cspf_disabled': config_cspf_disabled, 'config_rro_disabled': config_rro_disabled, 'config_hot_standby': config_hot_standby, 'config_pinned': config_pinned, 'config_persistent': config_persistent, 'config_soft_prempt': config_soft_prempt, 'config_priority_configured': config_priority_configured, 'config_setup_prority': config_setup_prority, 'config_holding_prority': config_holding_prority, 'config_hop_limit_configured': config_hop_limit_configured, 'config_hop_limit': config_hop_limit, 'config_traffic_eng_rate_configured': config_traffic_eng_rate_configured, 'config_traffic_eng_mean_rate': config_traffic_eng_mean_rate, 'config_traffic_eng_max_rate': config_traffic_eng_max_rate, 'config_traffic_eng_max_burst': config_traffic_eng_max_burst, 'config_abw_configured': config_abw_configured, 'config_cspf_computation_mode': config_cspf_computation_mode, 'config_admin_group_configured': config_admin_group_configured, 'admin_group_lists': admin_group_lists, 'auto_bandwidth': auto_bandwidth, }
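# Hedged usage sketch: every member above is exposed read-only through
# __builtin__.property, so operational state retrieved into this container can
# be read as plain attributes (sec_path remains the illustrative instance name
# assumed earlier, not a name defined by this module):
#
#   hop_limit = sec_path.config_hop_limit
#   standby   = sec_path.config_hot_standby
#
# _pyangbind_elements maps the YANG-derived names to these properties and is
# what pyangbind iterates over when copying a compatible object into a new one.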
# --- pybind/slxos/v16r_1_00b/protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/__init__.py (shivharis/pybind, Apache-2.0) ---
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import mep
class cfm_ma_sub_commands(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-interface - based on the path /protocol/cfm/domain-name/ma-name/cfm-ma-sub-commands. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__ccm_interval','__mip_policy','__mep',)
_yang_name = 'cfm-ma-sub-commands'
_rest_name = ''
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__ccm_interval = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'100-ms': {'value': 3}, u'10-seconds': {'value': 5}, u'1-second': {'value': 4}, u'3-ms': {'value': 1}, u'10-ms': {'value': 2}},), default=unicode("10-seconds"), is_leaf=True, yang_name="ccm-interval", rest_name="ccm-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'set CCM interval', u'cli-full-no': None, u'callpoint': u'setDot1agCcmInterval'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='ccm-interval-type', is_config=True)
self.__mip_policy = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'default': {'value': 1}, u'explicit': {'value': 2}},), is_leaf=True, yang_name="mip-policy", rest_name="mip-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set MIP policy', u'cli-full-no': None, u'callpoint': u'setDot1agMipPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='mip-policy-type', is_config=True)
self.__mep = YANGDynClass(base=YANGListType("mep_id",mep.mep, yang_name="mep", rest_name="mep", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='mep-id', extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}), is_container='list', yang_name="mep", rest_name="mep", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='list', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
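# Hedged sketch of the copy path above: if a single positional argument is
# supplied, it must expose every attribute named in _pyangbind_elements; each
# child that reports _changed() is then copied through the matching _set_*
# method. The source object below is an assumption for illustration only:
#
#   clone = cfm_ma_sub_commands(existing_obj)   # existing_obj: another populated instance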
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'protocol', u'cfm', u'domain-name', u'ma-name', u'cfm-ma-sub-commands']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'protocol', u'cfm', u'domain-name', u'ma-name']
def _get_ccm_interval(self):
"""
Getter method for ccm_interval, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/ccm_interval (ccm-interval-type)
"""
return self.__ccm_interval
def _set_ccm_interval(self, v, load=False):
"""
Setter method for ccm_interval, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/ccm_interval (ccm-interval-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_ccm_interval is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ccm_interval() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'100-ms': {'value': 3}, u'10-seconds': {'value': 5}, u'1-second': {'value': 4}, u'3-ms': {'value': 1}, u'10-ms': {'value': 2}},), default=unicode("10-seconds"), is_leaf=True, yang_name="ccm-interval", rest_name="ccm-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'set CCM interval', u'cli-full-no': None, u'callpoint': u'setDot1agCcmInterval'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='ccm-interval-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ccm_interval must be of a type compatible with ccm-interval-type""",
'defined-type': "brocade-dot1ag:ccm-interval-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'100-ms': {'value': 3}, u'10-seconds': {'value': 5}, u'1-second': {'value': 4}, u'3-ms': {'value': 1}, u'10-ms': {'value': 2}},), default=unicode("10-seconds"), is_leaf=True, yang_name="ccm-interval", rest_name="ccm-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'set CCM interval', u'cli-full-no': None, u'callpoint': u'setDot1agCcmInterval'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='ccm-interval-type', is_config=True)""",
})
self.__ccm_interval = t
if hasattr(self, '_set'):
self._set()
def _unset_ccm_interval(self):
self.__ccm_interval = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'100-ms': {'value': 3}, u'10-seconds': {'value': 5}, u'1-second': {'value': 4}, u'3-ms': {'value': 1}, u'10-ms': {'value': 2}},), default=unicode("10-seconds"), is_leaf=True, yang_name="ccm-interval", rest_name="ccm-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'set CCM interval', u'cli-full-no': None, u'callpoint': u'setDot1agCcmInterval'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='ccm-interval-type', is_config=True)
def _get_mip_policy(self):
"""
Getter method for mip_policy, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mip_policy (mip-policy-type)
"""
return self.__mip_policy
def _set_mip_policy(self, v, load=False):
"""
Setter method for mip_policy, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mip_policy (mip-policy-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_mip_policy is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mip_policy() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'default': {'value': 1}, u'explicit': {'value': 2}},), is_leaf=True, yang_name="mip-policy", rest_name="mip-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set MIP policy', u'cli-full-no': None, u'callpoint': u'setDot1agMipPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='mip-policy-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """mip_policy must be of a type compatible with mip-policy-type""",
'defined-type': "brocade-dot1ag:mip-policy-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'default': {'value': 1}, u'explicit': {'value': 2}},), is_leaf=True, yang_name="mip-policy", rest_name="mip-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set MIP policy', u'cli-full-no': None, u'callpoint': u'setDot1agMipPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='mip-policy-type', is_config=True)""",
})
self.__mip_policy = t
if hasattr(self, '_set'):
self._set()
def _unset_mip_policy(self):
self.__mip_policy = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'default': {'value': 1}, u'explicit': {'value': 2}},), is_leaf=True, yang_name="mip-policy", rest_name="mip-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set MIP policy', u'cli-full-no': None, u'callpoint': u'setDot1agMipPolicy'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='mip-policy-type', is_config=True)
def _get_mep(self):
"""
Getter method for mep, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mep (list)
"""
return self.__mep
def _set_mep(self, v, load=False):
"""
Setter method for mep, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mep (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_mep is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mep() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("mep_id",mep.mep, yang_name="mep", rest_name="mep", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='mep-id', extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}), is_container='list', yang_name="mep", rest_name="mep", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """mep must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("mep_id",mep.mep, yang_name="mep", rest_name="mep", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='mep-id', extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}), is_container='list', yang_name="mep", rest_name="mep", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='list', is_config=True)""",
})
self.__mep = t
if hasattr(self, '_set'):
self._set()
def _unset_mep(self):
self.__mep = YANGDynClass(base=YANGListType("mep_id",mep.mep, yang_name="mep", rest_name="mep", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='mep-id', extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}), is_container='list', yang_name="mep", rest_name="mep", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure Maintanance EndPoint', u'cli-run-template-enter': u'$(.?:)', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-sequence-commands': None, u'callpoint': u'setDot1agMep', u'cli-mode-name': u'config-cfm-md-ma-mep-$(mep-id)'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='list', is_config=True)
ccm_interval = __builtin__.property(_get_ccm_interval, _set_ccm_interval)
mip_policy = __builtin__.property(_get_mip_policy, _set_mip_policy)
mep = __builtin__.property(_get_mep, _set_mep)
_pyangbind_elements = {'ccm_interval': ccm_interval, 'mip_policy': mip_policy, 'mep': mep, }
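# Illustrative sketch only (not part of the generated bindings): driving the
# generated properties above from application code. `ma_cfg` is assumed to be
# an already-constructed instance of this cfm-ma-sub-commands container
# obtained from its parent pyangbind object.
def configure_ma(ma_cfg):
    ma_cfg.ccm_interval = "1-second"   # must be one of the restricted keys ("3-ms" .. "10-seconds")
    ma_cfg.mip_policy = "explicit"     # either "default" or "explicit"
    ma_cfg._unset_ccm_interval()       # reverts ccm-interval to its default of "10-seconds"
    return ma_cfg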
| 85.061856
| 1,055
| 0.690038
| 2,337
| 16,502
| 4.674369
| 0.088575
| 0.020505
| 0.041011
| 0.026364
| 0.817008
| 0.786891
| 0.775906
| 0.762175
| 0.762175
| 0.753387
| 0
| 0.008571
| 0.151618
| 16,502
| 193
| 1,056
| 85.502591
| 0.771714
| 0.106351
| 0
| 0.387597
| 0
| 0.023256
| 0.411091
| 0.14697
| 0
| 0
| 0
| 0
| 0
| 1
| 0.093023
| false
| 0
| 0.069767
| 0
| 0.294574
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c52074b71855ef72867102bc5564df2ba1896c19
| 4,619
|
py
|
Python
|
client/src/obc.py
|
estcube/telemetry-forwarding-client
|
be659c8dd8e4bd26d1d1974d63f90acffd150e34
|
[
"MIT"
] | 3
|
2020-06-11T12:34:25.000Z
|
2020-09-16T12:06:32.000Z
|
client/src/obc.py
|
estcube/telemetry-forwarding-client
|
be659c8dd8e4bd26d1d1974d63f90acffd150e34
|
[
"MIT"
] | 57
|
2020-09-16T09:11:04.000Z
|
2022-02-28T01:32:13.000Z
|
client/src/obc.py
|
estcube/Telemetry-Forwarding-Client
|
be659c8dd8e4bd26d1d1974d63f90acffd150e34
|
[
"MIT"
] | null | null | null |
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
from pkg_resources import parse_version
from kaitaistruct import __version__ as ks_version, KaitaiStruct, KaitaiStream, BytesIO
if parse_version(ks_version) < parse_version('0.7'):
raise Exception("Incompatible Kaitai Struct Python API: 0.7 or later is required, but you have %s" % (ks_version))
class Obc(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.reserved = self._io.read_bits_int(1) != 0
self.internal_flash = self._io.read_bits_int(1) != 0
self.internal_sram = self._io.read_bits_int(1) != 0
self.qspi_flash1 = self._io.read_bits_int(1) != 0
self.qspi_flash2 = self._io.read_bits_int(1) != 0
self.fmc_mram = self._io.read_bits_int(1) != 0
self.spi_fram1 = self._io.read_bits_int(1) != 0
self.spi_fram2 = self._io.read_bits_int(1) != 0
self.spi_fram3 = self._io.read_bits_int(1) != 0
self.io_expander = self._io.read_bits_int(1) != 0
self.fmc_mram_temp_sensor = self._io.read_bits_int(1) != 0
self.qspi_flash_temp_sensor = self._io.read_bits_int(1) != 0
self.io_expander_temp_sensor = self._io.read_bits_int(1) != 0
self.rtc = self._io.read_bits_int(1) != 0
self.current_adc = self._io.read_bits_int(1) != 0
self.aocs1_gyro1 = self._io.read_bits_int(1) != 0
self.aocs1_gyro2 = self._io.read_bits_int(1) != 0
self.aocs1_magnet = self._io.read_bits_int(1) != 0
self.aocs1_acc = self._io.read_bits_int(1) != 0
self.aocs1_temp = self._io.read_bits_int(1) != 0
self.aocs2_gyro1 = self._io.read_bits_int(1) != 0
self.aocs2_gyro2 = self._io.read_bits_int(1) != 0
self.aocs2_magnet = self._io.read_bits_int(1) != 0
self.aocs2_acc = self._io.read_bits_int(1) != 0
self.aocs2_temp = self._io.read_bits_int(1) != 0
self.payload_bus = self._io.read_bits_int(1) != 0
self.icp1_bus = self._io.read_bits_int(1) != 0
self.icp2_bus = self._io.read_bits_int(1) != 0
self.reaction1 = self._io.read_bits_int(1) != 0
self.reaction2 = self._io.read_bits_int(1) != 0
self.reaction3 = self._io.read_bits_int(1) != 0
self.oscillator = self._io.read_bits_int(1) != 0
self.err_mcu = self._io.read_bits_int(1) != 0
self.err_internal_flash = self._io.read_bits_int(1) != 0
self.err_internal_sram = self._io.read_bits_int(1) != 0
self.err_qspi_flash1 = self._io.read_bits_int(1) != 0
self.err_qspi_flash2 = self._io.read_bits_int(1) != 0
self.err_fmc_mram = self._io.read_bits_int(1) != 0
self.err_spi_fram1 = self._io.read_bits_int(1) != 0
self.err_spi_fram2 = self._io.read_bits_int(1) != 0
self.err_spi_fram3 = self._io.read_bits_int(1) != 0
self.err_io_expander = self._io.read_bits_int(1) != 0
self.err_mram_temp = self._io.read_bits_int(1) != 0
self.err_qspi_flash_temp = self._io.read_bits_int(1) != 0
self.err_io_expander_temp = self._io.read_bits_int(1) != 0
self.err_rtc = self._io.read_bits_int(1) != 0
self.err_current_adc = self._io.read_bits_int(1) != 0
self.err_aocs1_gyro1 = self._io.read_bits_int(1) != 0
self.err_aocs1_gyro2 = self._io.read_bits_int(1) != 0
self.err_aocs1_magnet = self._io.read_bits_int(1) != 0
self.err_aocs1_acc = self._io.read_bits_int(1) != 0
self.err_aocs1_temp = self._io.read_bits_int(1) != 0
self.err_aocs2_gyro1 = self._io.read_bits_int(1) != 0
self.err_aocs2_gyro2 = self._io.read_bits_int(1) != 0
self.err_aocs2_magnet = self._io.read_bits_int(1) != 0
self.err_aocs2_acc = self._io.read_bits_int(1) != 0
self.err_aocs2_temp = self._io.read_bits_int(1) != 0
self.err_payload_bus = self._io.read_bits_int(1) != 0
self.err_icp1_bus = self._io.read_bits_int(1) != 0
self.err_icp2_bus = self._io.read_bits_int(1) != 0
self.err_reaction1 = self._io.read_bits_int(1) != 0
self.err_reaction2 = self._io.read_bits_int(1) != 0
self.err_reaction3 = self._io.read_bits_int(1) != 0
self.err_oscillator = self._io.read_bits_int(1) != 0
self._io.align_to_byte()
self.fmc_mram_temp = self._io.read_u1()
self.qspi_fram_temp = self._io.read_u1()
self.io_expander_temp = self._io.read_u1()
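# Minimal usage sketch (assumption: `raw` holds one 11-byte OBC telemetry frame,
# i.e. 8 bytes of status/error flag bits followed by three temperature bytes).
# It parses the frame with the generated Obc class above.
if __name__ == "__main__":
    raw = b"\x00" * 11
    obc = Obc(KaitaiStream(BytesIO(raw)))
    print(obc.internal_flash, obc.err_mcu, obc.fmc_mram_temp)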
| 52.488636
| 118
| 0.657285
| 787
| 4,619
| 3.443456
| 0.120712
| 0.161624
| 0.247232
| 0.330627
| 0.823985
| 0.820295
| 0.796679
| 0.796679
| 0.796679
| 0.663838
| 0
| 0.050539
| 0.216064
| 4,619
| 87
| 119
| 53.091954
| 0.697874
| 0.020784
| 0
| 0
| 1
| 0
| 0.018367
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025316
| false
| 0
| 0.025316
| 0
| 0.063291
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c52c02d266fb08aaf3f326c61fa1e270518102e0
| 121
|
py
|
Python
|
cartographer/utils/collections.py
|
Patreon/cartographer
|
fe5c03decf01c9f7894bb9cf1f839af435143527
|
[
"Apache-2.0"
] | 29
|
2016-03-30T00:53:42.000Z
|
2022-03-02T23:45:12.000Z
|
cartographer/utils/collections.py
|
Patreon/cartographer
|
fe5c03decf01c9f7894bb9cf1f839af435143527
|
[
"Apache-2.0"
] | 20
|
2016-04-19T18:34:05.000Z
|
2022-02-14T14:18:33.000Z
|
cartographer/utils/collections.py
|
Patreon/cartographer
|
fe5c03decf01c9f7894bb9cf1f839af435143527
|
[
"Apache-2.0"
] | 5
|
2016-04-28T00:44:24.000Z
|
2019-10-26T08:09:17.000Z
|
def filter_dict(dictionary_to_filter):
return dict((k, v) for k, v in dictionary_to_filter.items() if v is not None)
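# Quick illustrative check (assumed example values): None entries are dropped,
# while falsy-but-not-None values such as 0 are kept.
assert filter_dict({"a": 1, "b": None, "c": 0}) == {"a": 1, "c": 0}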
| 40.333333
| 81
| 0.743802
| 23
| 121
| 3.695652
| 0.652174
| 0.282353
| 0.423529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157025
| 121
| 2
| 82
| 60.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
c52fa39e205177e471e16b57a23781f02f1d2a0d
| 7,345
|
py
|
Python
|
2019/day21_input.py
|
coingraham/adventofcode
|
52b5b3f049242881285d0c2704f44cc1ee2a821e
|
[
"MIT"
] | 5
|
2020-12-04T04:30:17.000Z
|
2021-11-12T11:26:22.000Z
|
2019/day21_input.py
|
coingraham/adventofcode
|
52b5b3f049242881285d0c2704f44cc1ee2a821e
|
[
"MIT"
] | null | null | null |
2019/day21_input.py
|
coingraham/adventofcode
|
52b5b3f049242881285d0c2704f44cc1ee2a821e
|
[
"MIT"
] | null | null | null |
input_data = """109,2050,21101,0,966,1,21101,13,0,0,1106,0,1378,21101,20,0,0,1105,1,1337,21101,0,27,0,1105,1,1279,1208,1,65,748,1005,748,73,1208,1,79,748,1005,748,110,1208,1,78,748,1005,748,132,1208,1,87,748,1005,748,169,1208,1,82,748,1005,748,239,21101,0,1041,1,21102,1,73,0,1105,1,1421,21101,0,78,1,21101,1041,0,2,21102,88,1,0,1106,0,1301,21101,0,68,1,21102,1041,1,2,21102,1,103,0,1106,0,1301,1101,0,1,750,1105,1,298,21102,1,82,1,21102,1,1041,2,21102,1,125,0,1106,0,1301,1102,1,2,750,1106,0,298,21101,0,79,1,21102,1041,1,2,21102,147,1,0,1105,1,1301,21102,84,1,1,21102,1,1041,2,21101,162,0,0,1106,0,1301,1101,0,3,750,1105,1,298,21102,1,65,1,21101,1041,0,2,21101,184,0,0,1106,0,1301,21102,76,1,1,21102,1041,1,2,21101,199,0,0,1106,0,1301,21101,75,0,1,21102,1,1041,2,21101,0,214,0,1105,1,1301,21102,221,1,0,1106,0,1337,21101,10,0,1,21101,0,1041,2,21101,236,0,0,1106,0,1301,1106,0,553,21102,1,85,1,21101,1041,0,2,21101,254,0,0,1106,0,1301,21102,1,78,1,21101,0,1041,2,21102,269,1,0,1106,0,1301,21102,276,1,0,1105,1,1337,21102,1,10,1,21101,1041,0,2,21102,291,1,0,1106,0,1301,1102,1,1,755,1105,1,553,21102,32,1,1,21102,1041,1,2,21101,313,0,0,1105,1,1301,21102,320,1,0,1105,1,1337,21102,1,327,0,1106,0,1279,1202,1,1,749,21102,1,65,2,21102,1,73,3,21101,0,346,0,1105,1,1889,1206,1,367,1007,749,69,748,1005,748,360,1102,1,1,756,1001,749,-64,751,1106,0,406,1008,749,74,748,1006,748,381,1101,-1,0,751,1105,1,406,1008,749,84,748,1006,748,395,1101,0,-2,751,1105,1,406,21102,1100,1,1,21102,1,406,0,1105,1,1421,21102,32,1,1,21101,0,1100,2,21101,421,0,0,1105,1,1301,21101,0,428,0,1106,0,1337,21101,435,0,0,1105,1,1279,2102,1,1,749,1008,749,74,748,1006,748,453,1102,-1,1,752,1105,1,478,1008,749,84,748,1006,748,467,1101,-2,0,752,1105,1,478,21101,1168,0,1,21101,0,478,0,1105,1,1421,21102,485,1,0,1105,1,1337,21101,0,10,1,21101,0,1168,2,21102,500,1,0,1105,1,1301,1007,920,15,748,1005,748,518,21102,1,1209,1,21101,0,518,0,1105,1,1421,1002,920,3,529,1001,529,921,529,101,0,750,0,1001,529,1,537,1002,751,1,0,1001,537,1,545,1001,752,0,0,1001,920,1,920,1105,1,13,1005,755,577,1006,756,570,21102,1,1100,1,21102,1,570,0,1106,0,1421,21101,987,0,1,1105,1,581,21101,1001,0,1,21101,0,588,0,1105,1,1378,1101,758,0,594,101,0,0,753,1006,753,654,21001,753,0,1,21102,610,1,0,1105,1,667,21102,0,1,1,21101,621,0,0,1106,0,1463,1205,1,647,21101,0,1015,1,21102,1,635,0,1106,0,1378,21102,1,1,1,21101,646,0,0,1106,0,1463,99,1001,594,1,594,1105,1,592,1006,755,664,1101,0,0,755,1106,0,647,4,754,99,109,2,1102,726,1,757,22102,1,-1,1,21102,9,1,2,21102,1,697,3,21101,692,0,0,1106,0,1913,109,-2,2105,1,0,109,2,101,0,757,706,2101,0,-1,0,1001,757,1,757,109,-2,2105,1,0,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,255,63,191,159,95,127,223,0,163,166,217,200,238,34,117,94,155,62,55,60,69,46,103,172,98,186,252,79,107,56,171,214,241,220,175,87,61,70,53,113,232,250,246,245,249,174,86,253,78,108,236,137,244,102,162,84,243,213,126,111,77,212,156,158,222,219,35,239,116,120,190,47,100,221,198,118,205,136,185,187,227,123,119,110,121,43,189,143,188,109,138,177,233,57,226,170,202,248,237,152,196,92,114,167,168,229,234,125,157,169,242,59,182,247,99,216,142,42,183,173,106,39,215,207,201,49,115,54,204,76,71,124,178,181,199,38,179,231,228,85,122,154,50,197,139,218,140,58,153,235,206,251,254,184,203,101,68,93,51,230,141,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,73,110,112,117,116,32,105,110,115,116,114,117,99,116,105,111,110,115,58,10,13,10,87,97,108,107,105,110,103,46,46,46,10,10,13,10,82,117,110,110,105,110,103
,46,46,46,10,10,25,10,68,105,100,110,39,116,32,109,97,107,101,32,105,116,32,97,99,114,111,115,115,58,10,10,58,73,110,118,97,108,105,100,32,111,112,101,114,97,116,105,111,110,59,32,101,120,112,101,99,116,101,100,32,115,111,109,101,116,104,105,110,103,32,108,105,107,101,32,65,78,68,44,32,79,82,44,32,111,114,32,78,79,84,67,73,110,118,97,108,105,100,32,102,105,114,115,116,32,97,114,103,117,109,101,110,116,59,32,101,120,112,101,99,116,101,100,32,115,111,109,101,116,104,105,110,103,32,108,105,107,101,32,65,44,32,66,44,32,67,44,32,68,44,32,74,44,32,111,114,32,84,40,73,110,118,97,108,105,100,32,115,101,99,111,110,100,32,97,114,103,117,109,101,110,116,59,32,101,120,112,101,99,116,101,100,32,74,32,111,114,32,84,52,79,117,116,32,111,102,32,109,101,109,111,114,121,59,32,97,116,32,109,111,115,116,32,49,53,32,105,110,115,116,114,117,99,116,105,111,110,115,32,99,97,110,32,98,101,32,115,116,111,114,101,100,0,109,1,1005,1262,1270,3,1262,20101,0,1262,0,109,-1,2105,1,0,109,1,21102,1288,1,0,1106,0,1263,21002,1262,1,0,1102,1,0,1262,109,-1,2106,0,0,109,5,21101,0,1310,0,1106,0,1279,22102,1,1,-2,22208,-2,-4,-1,1205,-1,1332,21201,-3,0,1,21102,1332,1,0,1106,0,1421,109,-5,2105,1,0,109,2,21102,1,1346,0,1106,0,1263,21208,1,32,-1,1205,-1,1363,21208,1,9,-1,1205,-1,1363,1106,0,1373,21102,1370,1,0,1106,0,1279,1106,0,1339,109,-2,2106,0,0,109,5,2102,1,-4,1386,20101,0,0,-2,22101,1,-4,-4,21102,1,0,-3,22208,-3,-2,-1,1205,-1,1416,2201,-4,-3,1408,4,0,21201,-3,1,-3,1105,1,1396,109,-5,2105,1,0,109,2,104,10,21201,-1,0,1,21102,1436,1,0,1105,1,1378,104,10,99,109,-2,2106,0,0,109,3,20002,594,753,-1,22202,-1,-2,-1,201,-1,754,754,109,-3,2105,1,0,109,10,21101,5,0,-5,21101,1,0,-4,21102,1,0,-3,1206,-9,1555,21101,3,0,-6,21101,0,5,-7,22208,-7,-5,-8,1206,-8,1507,22208,-6,-4,-8,1206,-8,1507,104,64,1105,1,1529,1205,-6,1527,1201,-7,716,1515,21002,0,-11,-8,21201,-8,46,-8,204,-8,1105,1,1529,104,46,21201,-7,1,-7,21207,-7,22,-8,1205,-8,1488,104,10,21201,-6,-1,-6,21207,-6,0,-8,1206,-8,1484,104,10,21207,-4,1,-8,1206,-8,1569,21102,0,1,-9,1105,1,1689,21208,-5,21,-8,1206,-8,1583,21101,1,0,-9,1106,0,1689,1201,-5,716,1588,21001,0,0,-2,21208,-4,1,-1,22202,-2,-1,-1,1205,-2,1613,21201,-5,0,1,21101,1613,0,0,1105,1,1444,1206,-1,1634,22101,0,-5,1,21102,1,1627,0,1106,0,1694,1206,1,1634,21101,0,2,-3,22107,1,-4,-8,22201,-1,-8,-8,1206,-8,1649,21201,-5,1,-5,1206,-3,1663,21201,-3,-1,-3,21201,-4,1,-4,1106,0,1667,21201,-4,-1,-4,21208,-4,0,-1,1201,-5,716,1676,22002,0,-1,-1,1206,-1,1686,21101,0,1,-4,1106,0,1477,109,-10,2106,0,0,109,11,21102,1,0,-6,21102,1,0,-8,21102,0,1,-7,20208,-6,920,-9,1205,-9,1880,21202,-6,3,-9,1201,-9,921,1725,20101,0,0,-5,1001,1725,1,1733,20102,1,0,-4,22101,0,-4,1,21102,1,1,2,21101,9,0,3,21102,1,1754,0,1106,0,1889,1206,1,1772,2201,-10,-4,1766,1001,1766,716,1766,21002,0,1,-3,1105,1,1790,21208,-4,-1,-9,1206,-9,1786,21201,-8,0,-3,1105,1,1790,21202,-7,1,-3,1001,1733,1,1795,21001,0,0,-2,21208,-2,-1,-9,1206,-9,1812,21201,-8,0,-1,1105,1,1816,22101,0,-7,-1,21208,-5,1,-9,1205,-9,1837,21208,-5,2,-9,1205,-9,1844,21208,-3,0,-1,1106,0,1855,22202,-3,-1,-1,1105,1,1855,22201,-3,-1,-1,22107,0,-1,-1,1106,0,1855,21208,-2,-1,-9,1206,-9,1869,22102,1,-1,-8,1105,1,1873,22102,1,-1,-7,21201,-6,1,-6,1105,1,1708,21202,-8,1,-10,109,-11,2105,1,0,109,7,22207,-6,-5,-3,22207,-4,-6,-2,22201,-3,-2,-1,21208,-1,0,-6,109,-7,2106,0,0,0,109,5,1202,-2,1,1912,21207,-4,0,-1,1206,-1,1930,21101,0,0,-4,21202,-4,1,1,22101,0,-3,2,21102,1,1,3,21102,1949,1,0,1106,0,1954,109,-5,2106,0,0,109,6,21207,-4,1,-1,1206,-1,1977,22207,-5,-3,-1,1206,-1,1977,22102,1,-5,-5,1105,1,2045,21201,-5,0,1,21201,-4,-1,2,2
1202,-3,2,3,21101,1996,0,0,1105,1,1954,21201,1,0,-5,21102,1,1,-2,22207,-5,-3,-1,1206,-1,2015,21101,0,0,-2,22202,-3,-2,-3,22107,0,-4,-1,1206,-1,2037,21202,-2,1,1,21102,1,2037,0,106,0,1912,21202,-3,-1,-3,22201,-5,-3,-5,109,-6,2105,1,0"""
| 7,345
| 7,345
| 0.687543
| 2,052
| 7,345
| 2.460526
| 0.184211
| 0.037235
| 0.036245
| 0.045157
| 0.276887
| 0.205585
| 0.116855
| 0.097445
| 0.077441
| 0.077441
| 0
| 0.686368
| 0.000272
| 7,345
| 1
| 7,345
| 7,345
| 0.001226
| 0
| 0
| 0
| 0
| 1
| 0.997277
| 0.997277
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c53329de3ea7d9bd985ca5fb1b5d8d143c4eb7ac
| 2,191
|
py
|
Python
|
pyrez/exceptions.py
|
EthanHicks1/Pyrez
|
022d62ae893594c2ddcd7fac5e740c693fd4fd54
|
[
"MIT"
] | null | null | null |
pyrez/exceptions.py
|
EthanHicks1/Pyrez
|
022d62ae893594c2ddcd7fac5e740c693fd4fd54
|
[
"MIT"
] | null | null | null |
pyrez/exceptions.py
|
EthanHicks1/Pyrez
|
022d62ae893594c2ddcd7fac5e740c693fd4fd54
|
[
"MIT"
] | null | null | null |
class CustomException(Exception):
def __init__(self, *args, **kwargs):
return super().__init__(self, *args, **kwargs)
def __str__(self):
return str(self.args [1])
class DeprecatedException(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class DailyLimitException(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class InvalidArgumentException(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class IdOrAuthEmptyException(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class NotFoundException(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class NotSupported(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class SessionLimitException(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class WrongCredentials(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class PaladinsOnlyException(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class SmiteOnlyException(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class RealmRoyaleOnlyException(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class PlayerNotFoundException(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class GetMatchPlayerDetailsException(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class UnexpectedException(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
class RequestErrorException(CustomException):
def __init__(self, *args, **kwargs):
return super().__init__(*args, **kwargs)
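# Illustrative check (assumed message string): CustomException.__init__ passes
# `self` through as the first positional argument to Exception.__init__, so the
# caller's message ends up at args[1], which is what __str__ returns.
try:
    raise NotFoundException("player does not exist")
except NotFoundException as exc:
    assert str(exc) == "player does not exist"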
| 42.960784
| 54
| 0.700593
| 217
| 2,191
| 6.465438
| 0.124424
| 0.228083
| 0.145403
| 0.218104
| 0.727726
| 0.727726
| 0.727726
| 0.727726
| 0.702067
| 0.702067
| 0
| 0.000536
| 0.149247
| 2,191
| 50
| 55
| 43.82
| 0.752146
| 0
| 0
| 0.62
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.34
| false
| 0
| 0
| 0.34
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
c54de03fd28e53eb54540b034a2e8a1f2994146a
| 3,532
|
py
|
Python
|
graph_test.py
|
MathewMacDougall/Two-Faced-Type
|
53fae81a151fd0689ac7328dda6b3e984c9a42e9
|
[
"MIT"
] | null | null | null |
graph_test.py
|
MathewMacDougall/Two-Faced-Type
|
53fae81a151fd0689ac7328dda6b3e984c9a42e9
|
[
"MIT"
] | 25
|
2020-11-15T05:30:23.000Z
|
2020-12-12T22:03:35.000Z
|
graph_test.py
|
MathewMacDougall/Two-Faced-Type
|
53fae81a151fd0689ac7328dda6b3e984c9a42e9
|
[
"MIT"
] | null | null | null |
import unittest
from graph import Graph
class TestGraph(unittest.TestCase):
def test_create_graph_simple(self):
graph = Graph()
graph.add_edge(0, 1)
graph.add_edge(1, 2)
graph.add_edge(2, 0)
graph.add_edge(2, 0) # Test double edges don't make a difference
self.assertEqual(graph.all_vertices(), {0, 1, 2})
self.assertCountEqual(graph.get_adjacency_list()[0], [1, 2])
self.assertCountEqual(graph.get_adjacency_list()[1], [0, 2])
self.assertCountEqual(graph.get_adjacency_list()[2], [0, 1])
def test_create_graph_complex(self):
graph = Graph()
graph.add_edge(0, 1)
graph.add_edge(1, 2)
graph.add_edge(2, 3)
graph.add_edge(2, 3)
graph.add_edge(2, 4)
graph.add_edge(1, 4)
graph.add_edge(4, 5)
graph.add_edge(5, 0)
self.assertEqual(graph.all_vertices(), {0, 1, 2, 3, 4, 5})
self.assertCountEqual(graph.get_adjacency_list()[0], [1, 5])
self.assertCountEqual(graph.get_adjacency_list()[1], [0, 2, 4])
self.assertCountEqual(graph.get_adjacency_list()[2], [1, 4, 3])
self.assertCountEqual(graph.get_adjacency_list()[3], [2])
self.assertCountEqual(graph.get_adjacency_list()[4], [1, 2, 5])
self.assertCountEqual(graph.get_adjacency_list()[5], [4, 0])
def test_remove_vertex(self):
graph = Graph()
graph.add_edge(0, 1)
graph.add_edge(1, 2)
graph.add_edge(2, 3)
graph.add_edge(2, 3)
graph.add_edge(2, 4)
graph.add_edge(1, 4)
graph.add_edge(4, 5)
graph.add_edge(5, 0)
self.assertEqual(graph.all_vertices(), {0, 1, 2, 3, 4, 5})
print(len(graph.all_vertices()))
graph.remove_vertex(0)
print(len(graph.all_vertices()))
self.assertEqual(graph.all_vertices(), {1, 2, 3, 4, 5})
self.assertCountEqual(graph.get_adjacency_list()[1], [2, 4])
self.assertCountEqual(graph.get_adjacency_list()[2], [1, 4, 3])
self.assertCountEqual(graph.get_adjacency_list()[3], [2])
self.assertCountEqual(graph.get_adjacency_list()[4], [1, 2, 5])
self.assertCountEqual(graph.get_adjacency_list()[5], [4])
graph.remove_vertex(4)
self.assertEqual(graph.all_vertices(), {1, 2, 3, 5})
self.assertCountEqual(graph.get_adjacency_list()[1], [2])
self.assertCountEqual(graph.get_adjacency_list()[2], [1, 3])
self.assertCountEqual(graph.get_adjacency_list()[3], [2])
self.assertCountEqual(graph.get_adjacency_list()[5], [])
def test_is_connected_with_connected_graph(self):
graph = Graph()
graph.add_edge(0, 1)
graph.add_edge(1, 2)
graph.add_edge(2, 3)
graph.add_edge(2, 3)
graph.add_edge(2, 4)
graph.add_edge(1, 4)
graph.add_edge(4, 5)
graph.add_edge(5, 0)
self.assertEqual(graph.all_vertices(), {0, 1, 2, 3, 4, 5})
self.assertTrue(graph.is_connected())
def test_is_connected_with_disconnected_graph(self):
graph = Graph()
graph.add_edge(0, 1)
graph.add_edge(1, 2)
graph.add_edge(2, 3)
graph.add_edge(2, 3)
graph.add_edge(2, 4)
graph.add_edge(1, 4)
graph.add_edge(4, 5)
graph.add_edge(5, 0)
self.assertEqual(graph.all_vertices(), {0, 1, 2, 3, 4, 5})
graph.remove_vertex(2)
self.assertFalse(graph.is_connected())
if __name__ == '__main__':
unittest.main()
| 34.627451
| 72
| 0.610136
| 509
| 3,532
| 4.013752
| 0.094303
| 0.140969
| 0.211454
| 0.246696
| 0.841899
| 0.789525
| 0.789525
| 0.789036
| 0.698972
| 0.611356
| 0
| 0.060708
| 0.239807
| 3,532
| 101
| 73
| 34.970297
| 0.700186
| 0.011608
| 0
| 0.650602
| 0
| 0
| 0.002294
| 0
| 0
| 0
| 0
| 0
| 0.325301
| 1
| 0.060241
| false
| 0
| 0.024096
| 0
| 0.096386
| 0.024096
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d7b2d7375396a8c241a8c99281ec5431deb5055
| 1,257
|
py
|
Python
|
tests/windows/get_physicaldisk/test_getting_unique_ids_from_output.py
|
Abd-Elrazek/InQRy
|
ab9d19a737a41673e8dcc419d49ca0e96476d560
|
[
"MIT"
] | 37
|
2017-05-12T02:32:26.000Z
|
2019-05-03T14:43:08.000Z
|
tests/windows/get_physicaldisk/test_getting_unique_ids_from_output.py
|
Abd-Elrazek/InQRy
|
ab9d19a737a41673e8dcc419d49ca0e96476d560
|
[
"MIT"
] | 11
|
2017-08-27T03:36:18.000Z
|
2018-10-28T01:31:12.000Z
|
tests/windows/get_physicaldisk/test_getting_unique_ids_from_output.py
|
Abd-Elrazek/InQRy
|
ab9d19a737a41673e8dcc419d49ca0e96476d560
|
[
"MIT"
] | 15
|
2019-06-13T11:29:12.000Z
|
2022-02-28T06:40:14.000Z
|
from inqry.system_specs import win_physical_disk
UNIQUE_ID_OUTPUT = """
UniqueId
--------
{256a2559-ce63-5434-1bee-3ff629daa3a7}
{4069d186-f178-856e-cff3-ba250c28446d}
{4da19f06-2e28-2722-a0fb-33c02696abcd}
50014EE20D887D66
eui.0025384161B6798A
5000C5007A75E216
500A07510F1A545C
ATA LITEONIT LMT-256M6M mSATA 256GB TW0XXM305508532M0705
IDE\Diskpacker-virtualbox-iso-1421140659-disk1__F.R7BNPC\5&1944dbef&0&0.0.0:vagrant-2012-r2
"""
def test_creating_list_of_unique_disk_ids():
expected_physical_disks = {'{256a2559-ce63-5434-1bee-3ff629daa3a7}',
'{4069d186-f178-856e-cff3-ba250c28446d}',
'{4da19f06-2e28-2722-a0fb-33c02696abcd}',
'50014EE20D887D66',
'eui.0025384161B6798A',
'5000C5007A75E216',
'500A07510F1A545C',
'ATA LITEONIT LMT-256M6M mSATA 256GB TW0XXM305508532M0705',
"IDE\Diskpacker-virtualbox-iso-1421140659-disk1__F.R7BNPC\5&1944dbef&0&0.0.0:vagrant-2012-r2"}
assert expected_physical_disks == set(win_physical_disk.get_physical_disk_identifiers(UNIQUE_ID_OUTPUT))
| 43.344828
| 125
| 0.638823
| 126
| 1,257
| 6.166667
| 0.515873
| 0.015444
| 0.015444
| 0.05148
| 0.738739
| 0.738739
| 0.738739
| 0.738739
| 0.738739
| 0.738739
| 0
| 0.344828
| 0.261734
| 1,257
| 28
| 126
| 44.892857
| 0.492457
| 0
| 0
| 0
| 0
| 0.08
| 0.564837
| 0.326173
| 0
| 0
| 0
| 0
| 0.04
| 1
| 0.04
| false
| 0
| 0.04
| 0
| 0.08
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3da3144e79a3871eba136a301ca02449b8340d18
| 390
|
py
|
Python
|
pyctogram/instagram_client/relations/__init__.py
|
RuzzyRullezz/pyctogram
|
b811c55dc1c74d57ef489810816322e7f2909f3d
|
[
"MIT"
] | 1
|
2019-12-10T08:01:58.000Z
|
2019-12-10T08:01:58.000Z
|
pyctogram/instagram_client/relations/__init__.py
|
RuzzyRullezz/pyctogram
|
b811c55dc1c74d57ef489810816322e7f2909f3d
|
[
"MIT"
] | null | null | null |
pyctogram/instagram_client/relations/__init__.py
|
RuzzyRullezz/pyctogram
|
b811c55dc1c74d57ef489810816322e7f2909f3d
|
[
"MIT"
] | null | null | null |
from .base import Actions, get_users
def get_followers(username, password, victim_username, proxies=None):
return get_users(username, password, victim_username, proxies=proxies, relation=Actions.followers)
def get_followings(username, password, victim_username, proxies=None):
return get_users(username, password, victim_username, proxies=proxies, relation=Actions.followings)
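# Hedged helper sketch (assumptions: the credentials are supplied by the caller
# and the underlying get_users call can reach the Instagram API); it simply
# bundles the two wrappers above into a single lookup.
def get_relations(username, password, victim_username, proxies=None):
    return {
        "followers": get_followers(username, password, victim_username, proxies=proxies),
        "followings": get_followings(username, password, victim_username, proxies=proxies),
    }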
| 39
| 103
| 0.810256
| 48
| 390
| 6.395833
| 0.333333
| 0.208469
| 0.286645
| 0.390879
| 0.742671
| 0.742671
| 0.742671
| 0.742671
| 0.742671
| 0.742671
| 0
| 0
| 0.102564
| 390
| 9
| 104
| 43.333333
| 0.877143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.8
| 0.2
| 0.4
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 12
|
3dca45f1cb27867b123a5f15fcfde334028fa3ca
| 7,964
|
py
|
Python
|
ogc_edr_lib/ogc_api_collection_metadata.py
|
eugenegesdisc/gmuedr
|
e8b3e5c7b8d18421d875f0f6f778a37a6d8ec3fd
|
[
"MIT"
] | null | null | null |
ogc_edr_lib/ogc_api_collection_metadata.py
|
eugenegesdisc/gmuedr
|
e8b3e5c7b8d18421d875f0f6f778a37a6d8ec3fd
|
[
"MIT"
] | null | null | null |
ogc_edr_lib/ogc_api_collection_metadata.py
|
eugenegesdisc/gmuedr
|
e8b3e5c7b8d18421d875f0f6f778a37a6d8ec3fd
|
[
"MIT"
] | null | null | null |
from typing import Tuple, Union
from aiohttp import web
from ogc_edr_lib.ogc_api import OgcApi
import logging
from ogc_edr_lib.ogc_api_collection_metadata_get_queries import (
OgcApiCollectionMetadataGetQueries)
from ogc_edr_lib.ogc_api_collection_metadata_list_data_items import (
OgcApiCollectionMetadataListDataItems
)
from ogc_edr_lib.ogc_api_collection_metadata_list_data_locations import (
OgcApiCollectionMetadataListDataLocations
)
Logger = logging.getLogger(__name__)
class OgcApiCollectionMetadata(OgcApi):
def list_collection_data_locations(
self, request: web.Request, collection_id, bbox=None,
datetime=None, limit=None) -> web.Response:
"""List available location identifers for the collection
List the locations available for the collection
:param collection_id: Identifier (id) of a specific collection
:type collection_id: str
:param bbox: Only features that have a geometry that intersects the
bounding box are selected. The bounding box is provided as four or six
numbers, depending on whether the coordinate reference system includes
a vertical axis (height or depth):
* Lower left corner, coordinate axis 1
* Lower left corner, coordinate axis 2
* Minimum value, coordinate axis 3 (optional)
* Upper right corner, coordinate axis 1
* Upper right corner, coordinate axis 2
* Maximum value, coordinate axis 3 (optional)
The coordinate reference system of the values is specified by
the `crs` query parameter. If the `crs`
query parameter is not defined the coordinate reference system is
defined by the default `crs`
for the query type. If a default `crs`
has not been defined the values will be assumed to be in the WGS 84
longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84)
coordinate reference system. For WGS 84 longitude/latitude the values
are in most cases the sequence of minimum longitude, minimum latitude,
maximum longitude and maximum latitude. However, in cases where the
box spans the antimeridian the first value (west-most box edge) is
larger than the third value (east-most box edge). If the vertical
axis is included, the third and the sixth number are the bottom and
the top of the 3-dimensional bounding box. If a feature has multiple
spatial geometry properties, it is the decision of the server whether
only a single spatial geometry property is used to determine the
extent or all relevant geometries.
:type bbox: dict | bytes
:param datetime: Either a date-time or an interval, open or closed.
Date and time expressions adhere to RFC 3339. Open intervals are
expressed using double-dots. Examples:
* A date-time: \"2018-02-12T23:20:50Z\"
* A closed interval:
\"2018-02-12T00:00:00Z/2018-03-18T12:31:12Z\"
* Open intervals: \"2018-02-12T00:00:00Z/..\"
or \"../2018-03-18T12:31:12Z\"
Only features that have a temporal property that intersects the
value of `datetime` are selected. If a feature has multiple
temporal properties, it is the decision of the server whether only
a single temporal property is used to determine the extent or all
relevant temporal properties.
:type datetime: str
:param limit: The optional limit parameter limits the number of results
that are presented in the response document. Minimum = 1.
Maximum = 10000. Default = 10.
:type limit: int
"""
ocmeta = OgcApiCollectionMetadataListDataLocations()
headers, status, content = ocmeta.list_collection_data_locations(
request, collection_id, bbox, datetime, limit)
return headers, status, content
def get_queries(
self, request: web.Request, collection_id, f=None):
"""
List query types supported by the collection
This will provide information about the query types that are supported
by the chosen collection Use content negotiation to request HTML or
JSON.
:param collection_id: Identifier (id) of a specific collection
:type collection_id: str
:param f: format to return the data response in
:type f: str
:returns: tuple of headers, status code, content
"""
ocmeta = OgcApiCollectionMetadataGetQueries()
headers, status, content = ocmeta.get_queries(
request, collection_id, f)
return headers, status, content
def list_data_items(
self, request: web.Request, collection_id,
bbox=None, datetime=None, limit=None):
"""List available items
List the items available in the collection accessible via a unique identifier
:param collection_id: Identifier (id) of a specific collection
:type collection_id: str
:param bbox: Only features that have a geometry that intersects the bounding box are selected. The bounding box is provided as four or six numbers, depending on whether the coordinate reference system includes a vertical axis (height or depth): * Lower left corner, coordinate axis 1 * Lower left corner, coordinate axis 2 * Minimum value, coordinate axis 3 (optional) * Upper right corner, coordinate axis 1 * Upper right corner, coordinate axis 2 * Maximum value, coordinate axis 3 (optional) The coordinate reference system of the values is specified by the `crs` query parameter. If the `crs` query parameter is not defined the coordinate reference system is defined by the default `crs` for the query type. If a default `crs` has not been defined the values will be assumed to be in the WGS 84 longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84) coordinate reference system. For WGS 84 longitude/latitude the values are in most cases the sequence of minimum longitude, minimum latitude, maximum longitude and maximum latitude. However, in cases where the box spans the antimeridian the first value (west-most box edge) is larger than the third value (east-most box edge). If the vertical axis is included, the third and the sixth number are the bottom and the top of the 3-dimensional bounding box. If a feature has multiple spatial geometry properties, it is the decision of the server whether only a single spatial geometry property is used to determine the extent or all relevant geometries.
:type bbox: dict | bytes
:param datetime: Either a date-time or an interval, open or closed. Date and time expressions adhere to RFC 3339. Open intervals are expressed using double-dots. Examples: * A date-time: \"2018-02-12T23:20:50Z\" * A closed interval: \"2018-02-12T00:00:00Z/2018-03-18T12:31:12Z\" * Open intervals: \"2018-02-12T00:00:00Z/..\" or \"../2018-03-18T12:31:12Z\" Only features that have a temporal property that intersects the value of `datetime` are selected. If a feature has multiple temporal properties, it is the decision of the server whether only a single temporal property is used to determine the extent or all relevant temporal properties.
:type datetime: str
:param limit: The optional limit parameter limits the number of results that are presented in the response document. Minimum = 1. Maximum = 10000. Default = 10.
:type limit: int
"""
ocmeta = OgcApiCollectionMetadataListDataItems()
headers, status, content = ocmeta.list_data_items(
request, collection_id, bbox, datetime, limit
)
return headers, status, content
| 63.206349
| 1,561
| 0.708815
| 1,108
| 7,964
| 5.046029
| 0.195848
| 0.025756
| 0.035772
| 0.030048
| 0.826507
| 0.810052
| 0.800751
| 0.800751
| 0.794133
| 0.794133
| 0
| 0.039043
| 0.228152
| 7,964
| 125
| 1,562
| 63.712
| 0.870506
| 0.727022
| 0
| 0.085714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.085714
| false
| 0
| 0.2
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9a95d81d2c4081cc80031302b6a6bfe2482c9c94
| 167
|
py
|
Python
|
new/views.py
|
Sravan996/django
|
3a982382d5cfe9bfb498534f1effcf58a3771539
|
[
"MIT"
] | null | null | null |
new/views.py
|
Sravan996/django
|
3a982382d5cfe9bfb498534f1effcf58a3771539
|
[
"MIT"
] | null | null | null |
new/views.py
|
Sravan996/django
|
3a982382d5cfe9bfb498534f1effcf58a3771539
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.shortcuts import HttpResponse
# Create your views here.
def index(request):
return HttpResponse('Hello World</en>')
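# Routing sketch (assumption: the lines below belong in the project's urls.py,
# not in this views module); it maps the site root to the index view above.
from django.urls import path

urlpatterns = [
    path('', index, name='index'),
]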
| 20.875
| 41
| 0.790419
| 22
| 167
| 6
| 0.772727
| 0.151515
| 0.287879
| 0.378788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125749
| 167
| 7
| 42
| 23.857143
| 0.90411
| 0.137725
| 0
| 0
| 0
| 0
| 0.112676
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 7
|
9aa0a86fc034faf07525b543313701f15dfaa4e4
| 4,526
|
py
|
Python
|
datasets/datasets.py
|
rioyokotalab/ecl-isvr
|
ae274b1b81b1d1c10db008140c477f5893a0c1c3
|
[
"BSD-4-Clause-UC"
] | null | null | null |
datasets/datasets.py
|
rioyokotalab/ecl-isvr
|
ae274b1b81b1d1c10db008140c477f5893a0c1c3
|
[
"BSD-4-Clause-UC"
] | null | null | null |
datasets/datasets.py
|
rioyokotalab/ecl-isvr
|
ae274b1b81b1d1c10db008140c477f5893a0c1c3
|
[
"BSD-4-Clause-UC"
] | 2
|
2021-09-30T02:13:40.000Z
|
2021-12-14T07:33:28.000Z
|
# -*- coding: utf-8 -*-
from typing import Callable, List, Optional
import numpy as np
import torch
import torchvision
__all__ = ["CIFAR10", "FashionMNIST"]
class CIFAR10(torch.utils.data.Dataset):
def __init__(self,
root: str,
train: bool = True,
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
download: bool = False,
indices: List[int] = None,
data_length: int = None,
shuffle: bool = False):
super(CIFAR10, self).__init__()
self.__datas__ = []
self.__labels__ = []
dataset = torchvision.datasets.CIFAR10(root,
train=train,
transform=transform,
target_transform=target_transform,
download=download)
self.__classes__ = dataset.classes
if indices is None:
indices = list(range(len(dataset)))
        for i in indices:  # load each sample up front and cache it in memory
d, l = dataset[i]
self.__datas__.append(d)
self.__labels__.append(l)
self.__length__ = (len(self.data)
if data_length is None else data_length)
self.__indices__ = np.arange(len(self.data))
self.__shuffle__ = shuffle
if self.shuffle:
np.random.shuffle(self.__indices__)
self.__call_count__ = 0
@property
def data(self): return self.__datas__
@property
def label(self): return self.__labels__
@property
def classes(self): return self.__classes__
@property
def indices(self): return self.__indices__
@property
def shuffle(self): return self.__shuffle__
def __len__(self): return self.__length__
def __getitem__(self, idx):
idx = self.indices[idx % len(self.data)]
d = self.data[idx]
l = self.label[idx]
self.__call_count__ += 1
if self.shuffle and self.__call_count__ >= len(self):
np.random.shuffle(self.__indices__)
self.__call_count__ = 0
return d, l
class FashionMNIST(torch.utils.data.Dataset):
def __init__(self,
root: str,
train: bool = True,
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
download: bool = False,
indices: List[int] = None,
data_length: int = None,
shuffle: bool = False):
super(FashionMNIST, self).__init__()
self.__datas__ = []
self.__labels__ = []
dataset = torchvision.datasets.FashionMNIST(root,
train=train,
transform=transform,
target_transform=target_transform,
download=download)
self.__classes__ = dataset.classes
if indices is None:
indices = list(range(len(dataset)))
        for i in indices:  # load each sample up front and cache it in memory
d, l = dataset[i]
self.__datas__.append(d)
self.__labels__.append(l)
self.__length__ = (len(self.data)
if data_length is None else data_length)
self.__indices__ = np.arange(len(self.data))
self.__shuffle__ = shuffle
if self.shuffle:
np.random.shuffle(self.__indices__)
self.__call_count__ = 0
@property
def data(self): return self.__datas__
@property
def label(self): return self.__labels__
@property
def classes(self): return self.__classes__
@property
def indices(self): return self.__indices__
@property
def shuffle(self): return self.__shuffle__
def __len__(self): return self.__length__
def __getitem__(self, idx):
idx = self.indices[idx % len(self.data)]
d = self.data[idx]
l = self.label[idx]
self.__call_count__ += 1
if self.shuffle and self.__call_count__ >= len(self):
np.random.shuffle(self.__indices__)
self.__call_count__ = 0
return d, l
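# Usage sketch (assumptions: downloading CIFAR-10 via torchvision is permitted
# and ./data is writable); it wraps the CIFAR10 class above in a DataLoader.
if __name__ == "__main__":
    from torch.utils.data import DataLoader
    import torchvision.transforms as T

    ds = CIFAR10("./data", train=True, download=True, transform=T.ToTensor(),
                 indices=list(range(256)), shuffle=True)
    loader = DataLoader(ds, batch_size=32)
    images, labels = next(iter(loader))
    print(images.shape, labels[:5])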
| 36.208
| 87
| 0.527176
| 446
| 4,526
| 4.865471
| 0.152466
| 0.0553
| 0.077419
| 0.053456
| 0.917972
| 0.917972
| 0.917972
| 0.917972
| 0.917972
| 0.869124
| 0
| 0.005405
| 0.386876
| 4,526
| 124
| 88
| 36.5
| 0.776577
| 0.015466
| 0
| 0.899083
| 0
| 0
| 0.004389
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.146789
| false
| 0
| 0.036697
| 0.110092
| 0.220183
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
b1353e1a12ba28028561c94ebd3cbfad77dbf672
| 194
|
py
|
Python
|
bentoml/lightgbm.py
|
francoisserra/BentoML
|
213e9e9b39e055286f2649c733907df88e6d2503
|
[
"Apache-2.0"
] | 1
|
2021-06-12T17:04:07.000Z
|
2021-06-12T17:04:07.000Z
|
bentoml/lightgbm.py
|
francoisserra/BentoML
|
213e9e9b39e055286f2649c733907df88e6d2503
|
[
"Apache-2.0"
] | 4
|
2021-05-16T08:06:25.000Z
|
2021-11-13T08:46:36.000Z
|
bentoml/lightgbm.py
|
francoisserra/BentoML
|
213e9e9b39e055286f2649c733907df88e6d2503
|
[
"Apache-2.0"
] | null | null | null |
from ._internal.frameworks.lightgbm import load
from ._internal.frameworks.lightgbm import save
from ._internal.frameworks.lightgbm import load_runner
__all__ = ["load", "load_runner", "save"]
| 32.333333
| 54
| 0.804124
| 24
| 194
| 6.125
| 0.375
| 0.244898
| 0.44898
| 0.612245
| 0.789116
| 0.544218
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092784
| 194
| 5
| 55
| 38.8
| 0.835227
| 0
| 0
| 0
| 0
| 0
| 0.097938
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b15a0f38860998844631ced61f5490b9a9898c55
| 7,135
|
py
|
Python
|
tests/test_detectCompileCommand.py
|
langrind/ccjtools
|
6f92d8cadf24d6e1f26e984df3c11b4d58061053
|
[
"MIT"
] | null | null | null |
tests/test_detectCompileCommand.py
|
langrind/ccjtools
|
6f92d8cadf24d6e1f26e984df3c11b4d58061053
|
[
"MIT"
] | null | null | null |
tests/test_detectCompileCommand.py
|
langrind/ccjtools
|
6f92d8cadf24d6e1f26e984df3c11b4d58061053
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from ccjtools import ccj_make
def test_detectExactSpecifiedCompilerCommandWord():
"""Using -c option, check that the exact word is recognized"""
inputFileName = 'dummy'
parsedArgs = ccj_make.mkccj_parse_args(['progname', inputFileName, '-c', 'mastadon'])
if not parsedArgs:
assert False
# Note that we are basically testing "strcmp()" here. A different test is used
# to check a whole line of input
if not ccj_make.mkccj_is_compiler_command(parsedArgs, "mastadon"):
assert False
if ccj_make.mkccj_is_compiler_command(parsedArgs, "Mastadon"):
assert False
if ccj_make.mkccj_is_compiler_command(parsedArgs, "Mastadon"):
assert False
if ccj_make.mkccj_is_compiler_command(parsedArgs, "mastadon++"):
assert False
if ccj_make.mkccj_is_compiler_command(parsedArgs, "astadon"):
assert False
assert True
def test_detectCompilerWord():
"""Not using -c option, check that plausible compiler commands are recognized"""
inputFileName = 'dummy'
parsedArgs = ccj_make.mkccj_parse_args(['progname', inputFileName])
if not parsedArgs:
assert False
# Note that we are basically testing a regexp single-word match. A different test
# is used to check a whole line of input
if not ccj_make.mkccj_is_compiler_command(parsedArgs, "gcc"):
assert False
if not ccj_make.mkccj_is_compiler_command(parsedArgs, "mastadon-gcc"):
assert False
if not ccj_make.mkccj_is_compiler_command(parsedArgs, "Mastadon-c++"):
assert False
if not ccj_make.mkccj_is_compiler_command(parsedArgs, "gcc"):
assert False
if not ccj_make.mkccj_is_compiler_command(parsedArgs, "c++"):
assert False
if not ccj_make.mkccj_is_compiler_command(parsedArgs, "g++"):
assert False
if ccj_make.mkccj_is_compiler_command(parsedArgs, "mastadon++"):
assert False
if ccj_make.mkccj_is_compiler_command(parsedArgs, "mastadon"):
assert False
assert True
def test_detectExactSpecifiedCompilerCommand():
"""Using -c option, check that lines are recognized correctly"""
inputFileName = 'dummy'
parsedArgs = ccj_make.mkccj_parse_args(['progname', inputFileName, '-c', 'mastadon'])
if not parsedArgs:
assert False
if ccj_make.mkccj_process_line(parsedArgs, {}, [], "mastadons are not bluefish -Itheentireseas"):
assert False
if not ccj_make.mkccj_process_line(parsedArgs, {}, [], "mastadon are not bluefish -Itheentireseas"):
assert False
if ccj_make.mkccj_process_line(parsedArgs, {}, [], "mastadon-gcc mastadon.c -D_THIS_ -D_THAT_ -fno-dependent-clauses-or-santa-clauses-either"):
assert False
bigString = "/opt/gcc-arm-none-eabi-6-2017-q2-update/bin/arm-none-eabi-g++ -DCONFIG_ARCH_BOARD_PX4_FMU_V5 -D__CUSTOM_FILE_IO__ -D__DF_NUTTX -D__PX4_NUTTX -D__STDC_FORMAT_MACROS -isystem ../../platforms/nuttx/NuttX/include/cxx -isystem NuttX/nuttx/include/cxx -isystem NuttX/nuttx/include -I../../boards/px4/fmu-v5/src -I../../platforms/nuttx/src/px4/common/include -I. -Isrc -Isrc/lib -Isrc/modules -I../../platforms/nuttx/src/px4/stm/stm32f7/include -I../../platforms/common/include -I../../src -I../../src/include -I../../src/lib -I../../src/lib/DriverFramework/framework/include -I../../src/lib/matrix -I../../src/modules -I../../src/platforms -INuttX/nuttx/arch/arm/src/armv7-m -INuttX/nuttx/arch/arm/src/chip -INuttX/nuttx/arch/arm/src/common -INuttX/apps/include -mcpu=cortex-m7 -mthumb -mfpu=fpv5-d16 -mfloat-abi=hard -Os -DNDEBUG -g -fdata-sections -ffunction-sections -fomit-frame-pointer -fmerge-all-constants -fno-signed-zeros -fno-trapping-math -freciprocal-math -fno-math-errno -fno-strict-aliasing -fvisibility=hidden -include visibility.h -Wall -Wextra -Werror -Warray-bounds -Wcast-align -Wdisabled-optimization -Wdouble-promotion -Wfatal-errors -Wfloat-equal -Wformat-security -Winit-self -Wlogical-op -Wpointer-arith -Wshadow -Wuninitialized -Wunknown-pragmas -Wunused-variable -Wno-missing-field-initializers -Wno-missing-include-dirs -Wno-unused-parameter -fdiagnostics-color=always -fno-builtin-printf -fno-strength-reduce -Wformat=1 -Wunused-but-set-variable -Wno-format-truncation -fcheck-new -fno-exceptions -fno-rtti -fno-threadsafe-statics -Wreorder -Wno-overloaded-virtual -nostdinc++ -std=gnu++11 -o msg/CMakeFiles/uorb_msgs.dir/topics_sources/uORBTopics.cpp.obj -c /home/langrind/Firmware/build/px4_fmu-v5_multicopter/msg/topics_sources/uORBTopics.cpp"
if ccj_make.mkccj_process_line(parsedArgs, {}, [], bigString):
assert False
assert True
def test_detectCompilerCommandLine():
"""Not using -c option, check that plausible compiler command lines are recognized"""
inputFileName = 'dummy'
parsedArgs = ccj_make.mkccj_parse_args(['progname', inputFileName])
if not parsedArgs:
assert False
if ccj_make.mkccj_process_line(parsedArgs, {}, [], "mastadons are not bluefish -Itheentireseas"):
assert False
if not ccj_make.mkccj_process_line(parsedArgs, {}, [], "mastadon-gcc mastadon.c -D_THIS_ -D_THAT_ -fno-dependent-clauses-or-santa-clauses-either"):
assert False
bigString = "/opt/gcc-arm-none-eabi-6-2017-q2-update/bin/arm-none-eabi-g++ -DCONFIG_ARCH_BOARD_PX4_FMU_V5 -D__CUSTOM_FILE_IO__ -D__DF_NUTTX -D__PX4_NUTTX -D__STDC_FORMAT_MACROS -isystem ../../platforms/nuttx/NuttX/include/cxx -isystem NuttX/nuttx/include/cxx -isystem NuttX/nuttx/include -I../../boards/px4/fmu-v5/src -I../../platforms/nuttx/src/px4/common/include -I. -Isrc -Isrc/lib -Isrc/modules -I../../platforms/nuttx/src/px4/stm/stm32f7/include -I../../platforms/common/include -I../../src -I../../src/include -I../../src/lib -I../../src/lib/DriverFramework/framework/include -I../../src/lib/matrix -I../../src/modules -I../../src/platforms -INuttX/nuttx/arch/arm/src/armv7-m -INuttX/nuttx/arch/arm/src/chip -INuttX/nuttx/arch/arm/src/common -INuttX/apps/include -mcpu=cortex-m7 -mthumb -mfpu=fpv5-d16 -mfloat-abi=hard -Os -DNDEBUG -g -fdata-sections -ffunction-sections -fomit-frame-pointer -fmerge-all-constants -fno-signed-zeros -fno-trapping-math -freciprocal-math -fno-math-errno -fno-strict-aliasing -fvisibility=hidden -include visibility.h -Wall -Wextra -Werror -Warray-bounds -Wcast-align -Wdisabled-optimization -Wdouble-promotion -Wfatal-errors -Wfloat-equal -Wformat-security -Winit-self -Wlogical-op -Wpointer-arith -Wshadow -Wuninitialized -Wunknown-pragmas -Wunused-variable -Wno-missing-field-initializers -Wno-missing-include-dirs -Wno-unused-parameter -fdiagnostics-color=always -fno-builtin-printf -fno-strength-reduce -Wformat=1 -Wunused-but-set-variable -Wno-format-truncation -fcheck-new -fno-exceptions -fno-rtti -fno-threadsafe-statics -Wreorder -Wno-overloaded-virtual -nostdinc++ -std=gnu++11 -o msg/CMakeFiles/uorb_msgs.dir/topics_sources/uORBTopics.cpp.obj -c /home/langrind/Firmware/build/px4_fmu-v5_multicopter/msg/topics_sources/uORBTopics.cpp"
    # The real PX4 compile line above must also be recognized.
    assert ccj_make.mkccj_process_line(parsedArgs, {}, [], bigString)
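A hedged sketch of how these helpers might be driven over a captured build log, using only the call shapes exercised by the tests above; the log filename and the role of the dict/list accumulators are assumptions, not documented API.

import ccj_make

def build_entries_from_log(logPath='build.log'):
    # Placeholder log path; parse_args is invoked exactly as in the tests above.
    parsedArgs = ccj_make.mkccj_parse_args(['mkccj', logPath])
    seenDirs, entries = {}, []  # assumed accumulators filled by process_line
    with open(logPath) as log:
        for line in log:
            ccj_make.mkccj_process_line(parsedArgs, seenDirs, entries, line.rstrip('\n'))
    return entries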
| 62.043478
| 1,789
| 0.737211
| 981
| 7,135
| 5.20897
| 0.234455
| 0.034247
| 0.05636
| 0.035616
| 0.947945
| 0.939726
| 0.92955
| 0.92407
| 0.907436
| 0.904892
| 0
| 0.008511
| 0.12726
| 7,135
| 114
| 1,790
| 62.587719
| 0.812109
| 0.07274
| 0
| 0.746269
| 0
| 0.059701
| 0.608405
| 0.290396
| 0
| 0
| 0
| 0
| 0.41791
| 1
| 0.059701
| false
| 0
| 0.014925
| 0
| 0.074627
| 0.029851
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
493bd803d4c7823847afa2537f0ada612dffc26a
| 154
|
py
|
Python
|
unicode_urls/cms/__init__.py
|
Alexx-G/django-unicode-urls
|
fd4f89181c7172412ddf499efd050119c16c7d43
|
[
"MIT"
] | null | null | null |
unicode_urls/cms/__init__.py
|
Alexx-G/django-unicode-urls
|
fd4f89181c7172412ddf499efd050119c16c7d43
|
[
"MIT"
] | null | null | null |
unicode_urls/cms/__init__.py
|
Alexx-G/django-unicode-urls
|
fd4f89181c7172412ddf499efd050119c16c7d43
|
[
"MIT"
] | null | null | null |
from .urlutils import any_path_re
def patch_djangocms_urls():
import cms.utils.urlutils as cms_urlutils
cms_urlutils.any_path_re = any_path_re
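A minimal usage sketch, assuming django-cms is installed and this package is registered as a Django app; the AppConfig class name is hypothetical, the point is only that the patch has to run once at startup.

from django.apps import AppConfig

class UnicodeUrlsCmsConfig(AppConfig):  # hypothetical app config for this package
    name = 'unicode_urls.cms'

    def ready(self):
        # Apply the monkey-patch once Django has loaded the cms app.
        from unicode_urls.cms import patch_djangocms_urls
        patch_djangocms_urls()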
| 19.25
| 45
| 0.792208
| 25
| 154
| 4.48
| 0.52
| 0.1875
| 0.241071
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155844
| 154
| 7
| 46
| 22
| 0.861538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4950f58ba8e9dd8055eb104d658977925fab01b1
| 202
|
py
|
Python
|
src/success_backup_check/tests/test_success_backup_check.py
|
linuxluigi/success-backup-check
|
aa3be2dbd8b0106b931bf226614e05af68034077
|
[
"MIT"
] | null | null | null |
src/success_backup_check/tests/test_success_backup_check.py
|
linuxluigi/success-backup-check
|
aa3be2dbd8b0106b931bf226614e05af68034077
|
[
"MIT"
] | 7
|
2017-10-20T08:14:08.000Z
|
2017-10-31T10:04:19.000Z
|
src/success_backup_check/tests/test_success_backup_check.py
|
linuxluigi/success-backup-check
|
aa3be2dbd8b0106b931bf226614e05af68034077
|
[
"MIT"
] | null | null | null |
import pytest
import success_backup_check
def test_project_defines_author_and_version():
assert hasattr(success_backup_check, '__author__')
assert hasattr(success_backup_check, '__version__')
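The test only requires that the package exposes the two dunder attributes; a minimal sketch of what success_backup_check/__init__.py might contain (both values are placeholders, not taken from the real project).

# success_backup_check/__init__.py (illustrative only; values are placeholders)
__author__ = 'Example Author'
__version__ = '0.0.0'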
| 25.25
| 55
| 0.826733
| 25
| 202
| 5.92
| 0.56
| 0.263514
| 0.364865
| 0.351351
| 0.418919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108911
| 202
| 7
| 56
| 28.857143
| 0.822222
| 0
| 0
| 0
| 0
| 0
| 0.10396
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
497c36a9409e9932ee77efb6c8843fae9cedceac
| 1,200
|
py
|
Python
|
prompts/wizard_of_wikipedia.py
|
andreamad8/FSB
|
a81593590189fa5ad1cc37c5857f974effd9750a
|
[
"MIT"
] | 53
|
2021-10-11T03:24:14.000Z
|
2022-03-30T15:17:23.000Z
|
prompts/wizard_of_wikipedia.py
|
andreamad8/FSB
|
a81593590189fa5ad1cc37c5857f974effd9750a
|
[
"MIT"
] | 1
|
2021-12-26T22:48:38.000Z
|
2022-01-15T18:05:32.000Z
|
prompts/wizard_of_wikipedia.py
|
andreamad8/FSB
|
a81593590189fa5ad1cc37c5857f974effd9750a
|
[
"MIT"
] | 5
|
2022-01-27T09:07:39.000Z
|
2022-03-04T08:58:23.000Z
|
def convert_sample_to_shot_wow(sample, with_knowledge=True):
    """Render a Wizard-of-Wikipedia sample as a few-shot prompt string."""
    prefix = "Dialogue:\n"
    assert len(sample["dialogue"]) == len(sample["meta"])
    for turn, meta in zip(sample["dialogue"], sample["meta"]):
        prefix += f"User: {turn[0]}\n"
        if with_knowledge:
            # Show the grounding knowledge snippet for this turn, if any.
            if len(meta) > 0:
                prefix += f"KB: {meta[0]}\n"
            else:
                prefix += "KB: None\n"
        if turn[1] == "":
            # Empty assistant turn: stop here so the model completes the reply.
            prefix += "Assistant:"
            return prefix
        else:
            prefix += f"Assistant: {turn[1]}\n"
    return prefix
def convert_sample_to_shot_wow_interact(sample, with_knowledge=True):
    """Same as above, but reads knowledge from the "KB_wiki" field (interactive mode)."""
    prefix = "Dialogue:\n"
    assert len(sample["dialogue"]) == len(sample["KB_wiki"])
    for turn, meta in zip(sample["dialogue"], sample["KB_wiki"]):
        prefix += f"User: {turn[0]}\n"
        if with_knowledge:
            if len(meta) > 0:
                prefix += f"KB: {meta[0]}\n"
            else:
                prefix += "KB: None\n"
        if turn[1] == "":
            prefix += "Assistant:"
            return prefix
        else:
            prefix += f"Assistant: {turn[1]}\n"
    return prefix
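A small usage sketch; the sample layout below is inferred from the function body (a "dialogue" list of [user, assistant] turn pairs plus a parallel "meta" list of knowledge snippets) and is only an assumption about the real Wizard-of-Wikipedia format.

sample = {
    "dialogue": [
        ["Hi, tell me about jazz.", "Jazz originated in New Orleans."],
        ["Who were its pioneers?", ""],  # empty assistant turn: prompt ends with "Assistant:"
    ],
    "meta": [
        ["Jazz is a music genre that originated in New Orleans."],
        [],  # no knowledge for the last turn -> "KB: None"
    ],
}
print(convert_sample_to_shot_wow(sample, with_knowledge=True))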
| 33.333333
| 69
| 0.500833
| 145
| 1,200
| 4.041379
| 0.213793
| 0.119454
| 0.061433
| 0.061433
| 0.952218
| 0.952218
| 0.866894
| 0.866894
| 0.744027
| 0.744027
| 0
| 0.012361
| 0.325833
| 1,200
| 35
| 70
| 34.285714
| 0.71199
| 0
| 0
| 0.8125
| 0
| 0
| 0.186667
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 1
| 0.0625
| false
| 0
| 0
| 0
| 0.1875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49905454a4a778d8f4095622f9b3c6a78a737493
| 76,810
|
py
|
Python
|
h1/api/recovery_project_plan_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
h1/api/recovery_project_plan_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
h1/api/recovery_project_plan_api.py
|
hyperonecom/h1-client-python
|
4ce355852ba3120ec1b8f509ab5894a5c08da730
|
[
"MIT"
] | null | null | null |
"""
HyperOne
HyperOne API # noqa: E501
The version of the OpenAPI document: 0.1.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from h1.api_client import ApiClient, Endpoint as _Endpoint
from h1.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from h1.model.event import Event
from h1.model.inline_response400 import InlineResponse400
from h1.model.plan import Plan
from h1.model.recovery_project_plan_create import RecoveryProjectPlanCreate
from h1.model.recovery_project_plan_update import RecoveryProjectPlanUpdate
from h1.model.resource_service import ResourceService
from h1.model.tag import Tag
from h1.model.tag_array import TagArray
class RecoveryProjectPlanApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __recovery_project_plan_create(
self,
project_id,
location_id,
recovery_project_plan_create,
**kwargs
):
"""Create recovery/plan # noqa: E501
Create plan # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_create(project_id, location_id, recovery_project_plan_create, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
recovery_project_plan_create (RecoveryProjectPlanCreate):
Keyword Args:
x_idempotency_key (str): Idempotency key. [optional]
x_dry_run (str): Dry run. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Plan
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['recovery_project_plan_create'] = \
recovery_project_plan_create
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_create = _Endpoint(
settings={
'response_type': (Plan,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan',
'operation_id': 'recovery_project_plan_create',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'recovery_project_plan_create',
'x_idempotency_key',
'x_dry_run',
],
'required': [
'project_id',
'location_id',
'recovery_project_plan_create',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'recovery_project_plan_create':
(RecoveryProjectPlanCreate,),
'x_idempotency_key':
(str,),
'x_dry_run':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'x_idempotency_key': 'x-idempotency-key',
'x_dry_run': 'x-dry-run',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'recovery_project_plan_create': 'body',
'x_idempotency_key': 'header',
'x_dry_run': 'header',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__recovery_project_plan_create
)
def __recovery_project_plan_delete(
self,
project_id,
location_id,
plan_id,
**kwargs
):
"""Delete recovery/plan # noqa: E501
Delete plan # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_delete(project_id, location_id, plan_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
plan_id (str): Plan Id
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['plan_id'] = \
plan_id
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_delete = _Endpoint(
settings={
'response_type': None,
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan/{planId}',
'operation_id': 'recovery_project_plan_delete',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'plan_id',
],
'required': [
'project_id',
'location_id',
'plan_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'plan_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'plan_id': 'planId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'plan_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__recovery_project_plan_delete
)
def __recovery_project_plan_event_get(
self,
project_id,
location_id,
plan_id,
event_id,
**kwargs
):
"""Get recovery/plan.event # noqa: E501
Get recovery/plan.event # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_event_get(project_id, location_id, plan_id, event_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
plan_id (str): Plan Id
event_id (str): eventId
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Event
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['plan_id'] = \
plan_id
kwargs['event_id'] = \
event_id
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_event_get = _Endpoint(
settings={
'response_type': (Event,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan/{planId}/event/{eventId}',
'operation_id': 'recovery_project_plan_event_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'plan_id',
'event_id',
],
'required': [
'project_id',
'location_id',
'plan_id',
'event_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'plan_id':
(str,),
'event_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'plan_id': 'planId',
'event_id': 'eventId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'plan_id': 'path',
'event_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__recovery_project_plan_event_get
)
def __recovery_project_plan_event_list(
self,
project_id,
location_id,
plan_id,
**kwargs
):
"""List recovery/plan.event # noqa: E501
List recovery/plan.event # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_event_list(project_id, location_id, plan_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
plan_id (str): Plan Id
Keyword Args:
limit (float): $limit. [optional] if omitted the server will use the default value of 100
skip (float): $skip. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Event]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['plan_id'] = \
plan_id
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_event_list = _Endpoint(
settings={
'response_type': ([Event],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan/{planId}/event',
'operation_id': 'recovery_project_plan_event_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'plan_id',
'limit',
'skip',
],
'required': [
'project_id',
'location_id',
'plan_id',
],
'nullable': [
],
'enum': [
],
'validation': [
'limit',
]
},
root_map={
'validations': {
('limit',): {
'inclusive_maximum': 1000,
'inclusive_minimum': 1,
},
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'plan_id':
(str,),
'limit':
(float,),
'skip':
(float,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'plan_id': 'planId',
'limit': '$limit',
'skip': '$skip',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'plan_id': 'path',
'limit': 'query',
'skip': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__recovery_project_plan_event_list
)
def __recovery_project_plan_get(
self,
project_id,
location_id,
plan_id,
**kwargs
):
"""Get recovery/plan # noqa: E501
Returns a single plan # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_get(project_id, location_id, plan_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
plan_id (str): Plan Id
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Plan
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['plan_id'] = \
plan_id
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_get = _Endpoint(
settings={
'response_type': (Plan,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan/{planId}',
'operation_id': 'recovery_project_plan_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'plan_id',
],
'required': [
'project_id',
'location_id',
'plan_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'plan_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'plan_id': 'planId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'plan_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__recovery_project_plan_get
)
def __recovery_project_plan_list(
self,
project_id,
location_id,
**kwargs
):
"""List recovery/plan # noqa: E501
List plan # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_list(project_id, location_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
Keyword Args:
name (str): Filter by name. [optional]
tag_value (str): Filter by tag.value. [optional]
tag_key (str): Filter by tag.key. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Plan]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_list = _Endpoint(
settings={
'response_type': ([Plan],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan',
'operation_id': 'recovery_project_plan_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'name',
'tag_value',
'tag_key',
],
'required': [
'project_id',
'location_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'name':
(str,),
'tag_value':
(str,),
'tag_key':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'name': 'name',
'tag_value': 'tag.value',
'tag_key': 'tag.key',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'name': 'query',
'tag_value': 'query',
'tag_key': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__recovery_project_plan_list
)
def __recovery_project_plan_service_get(
self,
project_id,
location_id,
plan_id,
service_id,
**kwargs
):
"""Get recovery/plan.service # noqa: E501
Get recovery/plan.service # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_service_get(project_id, location_id, plan_id, service_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
plan_id (str): Plan Id
service_id (str): serviceId
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ResourceService
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['plan_id'] = \
plan_id
kwargs['service_id'] = \
service_id
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_service_get = _Endpoint(
settings={
'response_type': (ResourceService,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan/{planId}/service/{serviceId}',
'operation_id': 'recovery_project_plan_service_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'plan_id',
'service_id',
],
'required': [
'project_id',
'location_id',
'plan_id',
'service_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'plan_id':
(str,),
'service_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'plan_id': 'planId',
'service_id': 'serviceId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'plan_id': 'path',
'service_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__recovery_project_plan_service_get
)
def __recovery_project_plan_service_list(
self,
project_id,
location_id,
plan_id,
**kwargs
):
"""List recovery/plan.service # noqa: E501
List recovery/plan.service # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_service_list(project_id, location_id, plan_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
plan_id (str): Plan Id
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[ResourceService]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['plan_id'] = \
plan_id
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_service_list = _Endpoint(
settings={
'response_type': ([ResourceService],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan/{planId}/service',
'operation_id': 'recovery_project_plan_service_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'plan_id',
],
'required': [
'project_id',
'location_id',
'plan_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'plan_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'plan_id': 'planId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'plan_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__recovery_project_plan_service_list
)
def __recovery_project_plan_tag_create(
self,
project_id,
location_id,
plan_id,
tag,
**kwargs
):
"""Create recovery/plan.tag # noqa: E501
Create recovery/plan.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_tag_create(project_id, location_id, plan_id, tag, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
plan_id (str): Plan Id
tag (Tag):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Tag
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['plan_id'] = \
plan_id
kwargs['tag'] = \
tag
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_tag_create = _Endpoint(
settings={
'response_type': (Tag,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan/{planId}/tag',
'operation_id': 'recovery_project_plan_tag_create',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'plan_id',
'tag',
],
'required': [
'project_id',
'location_id',
'plan_id',
'tag',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'plan_id':
(str,),
'tag':
(Tag,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'plan_id': 'planId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'plan_id': 'path',
'tag': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__recovery_project_plan_tag_create
)
def __recovery_project_plan_tag_delete(
self,
project_id,
location_id,
plan_id,
tag_id,
**kwargs
):
"""Delete recovery/plan.tag # noqa: E501
Delete recovery/plan.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_tag_delete(project_id, location_id, plan_id, tag_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
plan_id (str): Plan Id
tag_id (str): tagId
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['plan_id'] = \
plan_id
kwargs['tag_id'] = \
tag_id
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_tag_delete = _Endpoint(
settings={
'response_type': None,
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan/{planId}/tag/{tagId}',
'operation_id': 'recovery_project_plan_tag_delete',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'plan_id',
'tag_id',
],
'required': [
'project_id',
'location_id',
'plan_id',
'tag_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'plan_id':
(str,),
'tag_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'plan_id': 'planId',
'tag_id': 'tagId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'plan_id': 'path',
'tag_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__recovery_project_plan_tag_delete
)
def __recovery_project_plan_tag_get(
self,
project_id,
location_id,
plan_id,
tag_id,
**kwargs
):
"""Get recovery/plan.tag # noqa: E501
Get recovery/plan.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_tag_get(project_id, location_id, plan_id, tag_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
plan_id (str): Plan Id
tag_id (str): tagId
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Tag
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['plan_id'] = \
plan_id
kwargs['tag_id'] = \
tag_id
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_tag_get = _Endpoint(
settings={
'response_type': (Tag,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan/{planId}/tag/{tagId}',
'operation_id': 'recovery_project_plan_tag_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'plan_id',
'tag_id',
],
'required': [
'project_id',
'location_id',
'plan_id',
'tag_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'plan_id':
(str,),
'tag_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'plan_id': 'planId',
'tag_id': 'tagId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'plan_id': 'path',
'tag_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__recovery_project_plan_tag_get
)
def __recovery_project_plan_tag_list(
self,
project_id,
location_id,
plan_id,
**kwargs
):
"""List recovery/plan.tag # noqa: E501
List recovery/plan.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_tag_list(project_id, location_id, plan_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
plan_id (str): Plan Id
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Tag]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['plan_id'] = \
plan_id
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_tag_list = _Endpoint(
settings={
'response_type': ([Tag],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan/{planId}/tag',
'operation_id': 'recovery_project_plan_tag_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'plan_id',
],
'required': [
'project_id',
'location_id',
'plan_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'plan_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'plan_id': 'planId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'plan_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__recovery_project_plan_tag_list
)
def __recovery_project_plan_tag_put(
self,
project_id,
location_id,
plan_id,
tag_array,
**kwargs
):
"""Replace recovery/plan.tag # noqa: E501
Replace recovery/plan.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_tag_put(project_id, location_id, plan_id, tag_array, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
plan_id (str): Plan Id
tag_array (TagArray):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Tag]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['plan_id'] = \
plan_id
kwargs['tag_array'] = \
tag_array
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_tag_put = _Endpoint(
settings={
'response_type': ([Tag],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan/{planId}/tag',
'operation_id': 'recovery_project_plan_tag_put',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'plan_id',
'tag_array',
],
'required': [
'project_id',
'location_id',
'plan_id',
'tag_array',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'plan_id':
(str,),
'tag_array':
(TagArray,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'plan_id': 'planId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'plan_id': 'path',
'tag_array': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__recovery_project_plan_tag_put
)
def __recovery_project_plan_update(
self,
project_id,
location_id,
plan_id,
recovery_project_plan_update,
**kwargs
):
"""Update recovery/plan # noqa: E501
Returns modified plan # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.recovery_project_plan_update(project_id, location_id, plan_id, recovery_project_plan_update, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
plan_id (str): Plan Id
recovery_project_plan_update (RecoveryProjectPlanUpdate):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Plan
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['plan_id'] = \
plan_id
kwargs['recovery_project_plan_update'] = \
recovery_project_plan_update
return self.call_with_http_info(**kwargs)
self.recovery_project_plan_update = _Endpoint(
settings={
'response_type': (Plan,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/recovery/{locationId}/project/{projectId}/plan/{planId}',
'operation_id': 'recovery_project_plan_update',
'http_method': 'PATCH',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'plan_id',
'recovery_project_plan_update',
],
'required': [
'project_id',
'location_id',
'plan_id',
'recovery_project_plan_update',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'plan_id':
(str,),
'recovery_project_plan_update':
(RecoveryProjectPlanUpdate,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'plan_id': 'planId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'plan_id': 'path',
'recovery_project_plan_update': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__recovery_project_plan_update
)
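A hedged usage sketch for the generated client; the project and location identifiers are placeholders and authentication/configuration details are omitted, but the import paths and the positional call shape follow the generated code above.

from h1.api_client import ApiClient
from h1.api.recovery_project_plan_api import RecoveryProjectPlanApi

api = RecoveryProjectPlanApi(ApiClient())
# List recovery plans for a project (both identifiers below are placeholders).
plans = api.recovery_project_plan_list('my-project-id', 'my-location-id')
for plan in plans:
    print(plan)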
| 36.524013
| 137
| 0.442833
| 6,439
| 76,810
| 4.987886
| 0.03308
| 0.043155
| 0.053243
| 0.041411
| 0.943675
| 0.920042
| 0.899804
| 0.897811
| 0.874739
| 0.871439
| 0
| 0.003379
| 0.472204
| 76,810
| 2,102
| 138
| 36.541389
| 0.788851
| 0.289975
| 0
| 0.719784
| 1
| 0
| 0.231413
| 0.050658
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010128
| false
| 0
| 0.008103
| 0
| 0.028359
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
499a70e266d8579796d64d1f4d58f86d8e09e3c3
| 143
|
py
|
Python
|
src/Utilities/__init__.py
|
sigseg5/nometa-tg
|
7d0d9f0cf5d8fd98a3808c07a5c44d30f1b13032
|
[
"MIT"
] | 3
|
2020-12-15T07:44:58.000Z
|
2022-03-11T18:57:44.000Z
|
src/Utilities/__init__.py
|
sigseg5/nometa-tg
|
7d0d9f0cf5d8fd98a3808c07a5c44d30f1b13032
|
[
"MIT"
] | null | null | null |
src/Utilities/__init__.py
|
sigseg5/nometa-tg
|
7d0d9f0cf5d8fd98a3808c07a5c44d30f1b13032
|
[
"MIT"
] | null | null | null |
from src.Utilities import cmd_logger
from src.Utilities import metadata_worker
from src.Utilities import misc
from src.Utilities import runner
| 28.6
| 41
| 0.86014
| 22
| 143
| 5.5
| 0.454545
| 0.231405
| 0.528926
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111888
| 143
| 4
| 42
| 35.75
| 0.952756
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b8da34c95a45838a0718da8340a3212acd784270
| 3,947
|
py
|
Python
|
tests/test_data.py
|
SaiKrishna1207/aos
|
a55a1eed80dc9b21f7e295b265228c0d54072a66
|
[
"Apache-2.0"
] | 3
|
2020-03-03T08:35:42.000Z
|
2020-09-03T09:30:37.000Z
|
tests/test_data.py
|
SaiKrishna1207/aos
|
a55a1eed80dc9b21f7e295b265228c0d54072a66
|
[
"Apache-2.0"
] | 4
|
2020-02-21T12:48:58.000Z
|
2020-04-30T11:12:52.000Z
|
tests/test_data.py
|
SaiKrishna1207/aos
|
a55a1eed80dc9b21f7e295b265228c0d54072a66
|
[
"Apache-2.0"
] | 5
|
2020-03-01T04:14:32.000Z
|
2021-12-11T15:20:42.000Z
|
def get_obj1():
obj = \
{
"sha": "d25341478381063d1c76e81b3a52e0592a7c997f",
"commit": {
"author": {
"name": "Stephen Dolan",
"email": "mu@netsoc.tcd.ie",
"date": "2013-06-22T16:30:59Z"
},
"committer": {
"name": "Stephen Dolan",
"email": "mu@netsoc.tcd.ie",
"date": "2013-06-22T16:30:59Z"
},
"message": "Merge pull request #162 from stedolan/utf8-fixes\n\nUtf8 fixes. Closes #161",
"tree": {
"sha": "6ab697a8dfb5a96e124666bf6d6213822599fb40",
"url": "https://api.github.com/repos/stedolan/jq/git/trees/6ab697a8dfb5a96e124666bf6d6213822599fb40"
},
"url": "https://api.github.com/repos/stedolan/jq/git/commits/d25341478381063d1c76e81b3a52e0592a7c997f",
"comment_count": 0
},
"url": "https://api.github.com/repos/stedolan/jq/commits/d25341478381063d1c76e81b3a52e0592a7c997f",
"html_url": "https://github.com/stedolan/jq/commit/d25341478381063d1c76e81b3a52e0592a7c997f",
"comments_url": "https://api.github.com/repos/stedolan/jq/commits/d25341478381063d1c76e81b3a52e0592a7c997f/comments",
"author": {
"login": "stedolan",
"id": 79765,
"avatar_url": "https://avatars.githubusercontent.com/u/79765?v=3",
"gravatar_id": "",
"url": "https://api.github.com/users/stedolan",
"html_url": "https://github.com/stedolan",
"followers_url": "https://api.github.com/users/stedolan/followers",
"following_url": "https://api.github.com/users/stedolan/following{/other_user}",
"gists_url": "https://api.github.com/users/stedolan/gists{/gist_id}",
"starred_url": "https://api.github.com/users/stedolan/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/stedolan/subscriptions",
"organizations_url": "https://api.github.com/users/stedolan/orgs",
"repos_url": "https://api.github.com/users/stedolan/repos",
"events_url": "https://api.github.com/users/stedolan/events{/privacy}",
"received_events_url": "https://api.github.com/users/stedolan/received_events",
"type": "User",
"site_admin": False
},
"committer": {
"login": "stedolan",
"id": 79765,
"avatar_url": "https://avatars.githubusercontent.com/u/79765?v=3",
"gravatar_id": "",
"url": "https://api.github.com/users/stedolan",
"html_url": "https://github.com/stedolan",
"followers_url": "https://api.github.com/users/stedolan/followers",
"following_url": "https://api.github.com/users/stedolan/following{/other_user}",
"gists_url": "https://api.github.com/users/stedolan/gists{/gist_id}",
"starred_url": "https://api.github.com/users/stedolan/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/stedolan/subscriptions",
"organizations_url": "https://api.github.com/users/stedolan/orgs",
"repos_url": "https://api.github.com/users/stedolan/repos",
"events_url": "https://api.github.com/users/stedolan/events{/privacy}",
"received_events_url": "https://api.github.com/users/stedolan/received_events",
"type": "User",
"site_admin": False
},
"parents": [
{
"sha": "54b9c9bdb225af5d886466d72f47eafc51acb4f7",
"url": "https://api.github.com/repos/stedolan/jq/commits/54b9c9bdb225af5d886466d72f47eafc51acb4f7",
"html_url": "https://github.com/stedolan/jq/commit/54b9c9bdb225af5d886466d72f47eafc51acb4f7"
},
{
"sha": "8b1b503609c161fea4b003a7179b3fbb2dd4345a",
"url": "https://api.github.com/repos/stedolan/jq/commits/8b1b503609c161fea4b003a7179b3fbb2dd4345a",
"html_url": "https://github.com/stedolan/jq/commit/8b1b503609c161fea4b003a7179b3fbb2dd4345a"
}
]
}
return obj
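A short example of how the fixture can be consumed in a test; the asserted values are read directly from the object defined above.

def test_obj1_shape():
    obj = get_obj1()
    assert obj["commit"]["author"]["name"] == "Stephen Dolan"
    assert obj["author"]["login"] == "stedolan"
    assert len(obj["parents"]) == 2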
| 49.962025
| 123
| 0.633392
| 405
| 3,947
| 6.074074
| 0.214815
| 0.107317
| 0.11626
| 0.179675
| 0.776829
| 0.776829
| 0.776829
| 0.776829
| 0.731707
| 0.697561
| 0
| 0.126999
| 0.192045
| 3,947
| 79
| 124
| 49.962025
| 0.644403
| 0
| 0
| 0.564103
| 0
| 0
| 0.706687
| 0.047366
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012821
| false
| 0
| 0
| 0
| 0.025641
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
77009347b5bee01d461e0bc59d8b6aa0208dc523
| 7,201
|
py
|
Python
|
ui/Pytest/test_Range.py
|
MoisesHenr/OCEAN
|
e99c853893adc89652794ace62fcc8ffa78aa7ac
|
[
"MIT"
] | 15
|
2021-06-15T13:48:03.000Z
|
2022-01-26T13:51:46.000Z
|
ui/Pytest/test_Range.py
|
MoisesHenr/OCEAN
|
e99c853893adc89652794ace62fcc8ffa78aa7ac
|
[
"MIT"
] | 1
|
2021-07-04T02:58:29.000Z
|
2021-07-04T02:58:29.000Z
|
ui/Pytest/test_Range.py
|
MoisesHenr/OCEAN
|
e99c853893adc89652794ace62fcc8ffa78aa7ac
|
[
"MIT"
] | 2
|
2021-06-21T20:44:01.000Z
|
2021-06-23T11:10:56.000Z
|
# Author: Moises Henrique Pereira
# These tests cover the controller functions of the numerical features range component
import pytest
import sys
from PyQt5 import QtWidgets
from ui.mainTest import StaticObjects
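# Each test creates a QApplication and a static Slider3Ranges view; None or wrongly typed
# arguments to the range component are expected to raise AssertionError.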
@pytest.mark.parametrize('slider', [1, 2.9, False, ('t1', 't2'), None])
def test_CIR_setSlider_wrong_parameter(slider):
with pytest.raises(AssertionError):
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceSlider3RangesView = StaticObjects.staticCounterfactualInterfaceSlider3RangesView()
counterfactualInterfaceSlider3RangesView.labelSlider.initializeSlider(0, 1, 1)
rangeMin = counterfactualInterfaceSlider3RangesView.labelRangeMinimum
rangeMin.setSlider(slider)
def test_CIR_setSlider_right_parameter():
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceSlider3RangesView = StaticObjects.staticCounterfactualInterfaceSlider3RangesView()
counterfactualInterfaceSlider3RangesView.labelSlider.initializeSlider(0, 1, 1)
rangeMin = counterfactualInterfaceSlider3RangesView.labelRangeMinimum
rangeMin.setSlider(counterfactualInterfaceSlider3RangesView.labelSlider)
def test_CIR_initializeRange_none_min_parameter():
with pytest.raises(AssertionError):
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceSlider3RangesView = StaticObjects.staticCounterfactualInterfaceSlider3RangesView()
counterfactualInterfaceSlider3RangesView.labelSlider.initializeSlider(0, 1, 1)
rangeMin = counterfactualInterfaceSlider3RangesView.labelRangeMinimum
rangeMin.setSlider(counterfactualInterfaceSlider3RangesView.labelSlider)
rangeMin.initializeRange(None, 1, 0.5, 15)
def test_CIR_initializeRange_none_max_parameter():
with pytest.raises(AssertionError):
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceSlider3RangesView = StaticObjects.staticCounterfactualInterfaceSlider3RangesView()
counterfactualInterfaceSlider3RangesView.labelSlider.initializeSlider(0, 1, 1)
rangeMin = counterfactualInterfaceSlider3RangesView.labelRangeMinimum
rangeMin.setSlider(counterfactualInterfaceSlider3RangesView.labelSlider)
rangeMin.initializeRange(0, None, 0.5, 15)
def test_CIR_initializeRange_none_value_parameter():
with pytest.raises(AssertionError):
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceSlider3RangesView = StaticObjects.staticCounterfactualInterfaceSlider3RangesView()
counterfactualInterfaceSlider3RangesView.labelSlider.initializeSlider(0, 1, 1)
rangeMin = counterfactualInterfaceSlider3RangesView.labelRangeMinimum
rangeMin.setSlider(counterfactualInterfaceSlider3RangesView.labelSlider)
rangeMin.initializeRange(0, 1, None, 15)
def test_CIR_initializeRange_none_space_parameter():
with pytest.raises(AssertionError):
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceSlider3RangesView = StaticObjects.staticCounterfactualInterfaceSlider3RangesView()
counterfactualInterfaceSlider3RangesView.labelSlider.initializeSlider(0, 1, 1)
rangeMin = counterfactualInterfaceSlider3RangesView.labelRangeMinimum
rangeMin.setSlider(counterfactualInterfaceSlider3RangesView.labelSlider)
rangeMin.initializeRange(0, 1, 0.5, None)
def test_CIR_initializeRange_right_parameters():
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceSlider3RangesView = StaticObjects.staticCounterfactualInterfaceSlider3RangesView()
counterfactualInterfaceSlider3RangesView.labelSlider.initializeSlider(0, 1, 1)
rangeMin = counterfactualInterfaceSlider3RangesView.labelRangeMinimum
rangeMin.setSlider(counterfactualInterfaceSlider3RangesView.labelSlider)
rangeMin.initializeRange(0, 1, 0.5, 15)
def test_CIR_updateRange_none_min_parameter():
with pytest.raises(AssertionError):
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceSlider3RangesView = StaticObjects.staticCounterfactualInterfaceSlider3RangesView()
counterfactualInterfaceSlider3RangesView.labelSlider.initializeSlider(0, 1, 1)
rangeMin = counterfactualInterfaceSlider3RangesView.labelRangeMinimum
rangeMin.setSlider(counterfactualInterfaceSlider3RangesView.labelSlider)
rangeMin.initializeRange(0, 1, 0.5, 15)
rangeMin.updateRange(None, 1, 0.5)
def test_CIR_updateRange_none_max_parameter():
with pytest.raises(AssertionError):
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceSlider3RangesView = StaticObjects.staticCounterfactualInterfaceSlider3RangesView()
counterfactualInterfaceSlider3RangesView.labelSlider.initializeSlider(0, 1, 1)
rangeMin = counterfactualInterfaceSlider3RangesView.labelRangeMinimum
rangeMin.setSlider(counterfactualInterfaceSlider3RangesView.labelSlider)
rangeMin.initializeRange(0, 1, 0.5, 15)
rangeMin.updateRange(0, None, 0.5)
def test_CIR_updateRange_none_value_parameter():
with pytest.raises(AssertionError):
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceSlider3RangesView = StaticObjects.staticCounterfactualInterfaceSlider3RangesView()
counterfactualInterfaceSlider3RangesView.labelSlider.initializeSlider(0, 1, 1)
rangeMin = counterfactualInterfaceSlider3RangesView.labelRangeMinimum
rangeMin.setSlider(counterfactualInterfaceSlider3RangesView.labelSlider)
rangeMin.initializeRange(0, 1, 0.5, 15)
rangeMin.updateRange(0, 1, None)
def test_CIR_updateRange_right_parameters():
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceSlider3RangesView = StaticObjects.staticCounterfactualInterfaceSlider3RangesView()
counterfactualInterfaceSlider3RangesView.labelSlider.initializeSlider(0, 1, 1)
rangeMin = counterfactualInterfaceSlider3RangesView.labelRangeMinimum
rangeMin.setSlider(counterfactualInterfaceSlider3RangesView.labelSlider)
rangeMin.initializeRange(0, 1, 0.5, 15)
rangeMin.updateRange(0, 1, 0.3)
def test_CIR_setValue_none_parameter():
with pytest.raises(AssertionError):
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceSlider3RangesView = StaticObjects.staticCounterfactualInterfaceSlider3RangesView()
counterfactualInterfaceSlider3RangesView.labelSlider.initializeSlider(0, 1, 1)
rangeMin = counterfactualInterfaceSlider3RangesView.labelRangeMinimum
rangeMin.setSlider(counterfactualInterfaceSlider3RangesView.labelSlider)
rangeMin.initializeRange(0, 1, 0.5, 15)
rangeMin.setValue(None)
def test_CIR_setValue_right_parameters():
app = QtWidgets.QApplication(sys.argv)
counterfactualInterfaceSlider3RangesView = StaticObjects.staticCounterfactualInterfaceSlider3RangesView()
counterfactualInterfaceSlider3RangesView.labelSlider.initializeSlider(0, 1, 1)
rangeMin = counterfactualInterfaceSlider3RangesView.labelRangeMinimum
rangeMin.setSlider(counterfactualInterfaceSlider3RangesView.labelSlider)
rangeMin.initializeRange(0, 1, 0.5, 15)
rangeMin.setValue(0.3)
| 56.257813
| 113
| 0.805583
| 543
| 7,201
| 10.574586
| 0.119705
| 0.222048
| 0.02264
| 0.061129
| 0.924417
| 0.918147
| 0.912748
| 0.902821
| 0.89272
| 0.89272
| 0
| 0.02914
| 0.127899
| 7,201
| 128
| 114
| 56.257813
| 0.885191
| 0.017498
| 0
| 0.733945
| 0
| 0
| 0.001414
| 0
| 0
| 0
| 0
| 0
| 0.082569
| 1
| 0.119266
| false
| 0
| 0.036697
| 0
| 0.155963
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6221a46e082c35a5b882386742c5234fe505e8f6
| 9,529
|
py
|
Python
|
test/propagation_warn_only_test.py
|
lechat/jenkinsflow
|
87396069dda4f0681829e5d4e264e4f09ae34131
|
[
"BSD-3-Clause"
] | null | null | null |
test/propagation_warn_only_test.py
|
lechat/jenkinsflow
|
87396069dda4f0681829e5d4e264e4f09ae34131
|
[
"BSD-3-Clause"
] | null | null | null |
test/propagation_warn_only_test.py
|
lechat/jenkinsflow
|
87396069dda4f0681829e5d4e264e4f09ae34131
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from pytest import raises
from jenkinsflow.flow import serial, parallel, FailedChildJobException, FailedChildJobsException, Propagation, BuildResult
from .framework import api_select
from .framework.utils import pre_existing_fake_cli
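# These tests verify that propagation=Propagation.FAILURE_TO_UNSTABLE downgrades a failing child job
# to an UNSTABLE flow result instead of raising, while failures outside such a scope still raise
# FailedChildJobException / FailedChildJobsException.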
def test_propagation_warn_only_serial(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j12_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=2, serial=True)
api.job('j13', exec_time=0.01, max_fails=0, expect_invocations=0, expect_order=None)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3, propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl1:
ctrl1.invoke('j11')
ctrl1.invoke('j12_fail')
ctrl1.invoke('j13')
assert ctrl1.result == BuildResult.UNSTABLE
# Note: the fact that no error was raised also implies that the failure didn't propagate as failure
def test_propagation_warn_only_parallel(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j1_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
api.job('j2', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3, propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl1:
ctrl1.invoke('j1_fail')
ctrl1.invoke('j2')
def test_propagation_warn_only_nested_serial_parallel(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=2, serial=True)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=2)
api.job('j23', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=2)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.parallel(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl2:
ctrl2.invoke('j21')
ctrl2.invoke('j22_fail')
ctrl2.invoke('j23')
def test_propagation_warn_only_nested_parallel_serial(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1, serial=True)
api.job('j23', exec_time=0.01, max_fails=0, expect_invocations=0, expect_order=None)
with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl2:
ctrl2.invoke('j21')
ctrl2.invoke('j22_fail')
ctrl2.invoke('j23')
def test_propagation_warn_only_nested_serial_serial(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=2)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=2)
api.job('j23', exec_time=0.01, max_fails=0, expect_invocations=0, expect_order=None)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl2:
ctrl2.invoke('j21')
ctrl2.invoke('j22_fail')
ctrl2.invoke('j23')
def test_propagation_warn_only_nested_parallel_parallel(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
api.job('j23', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.parallel(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl2:
ctrl2.invoke('j21')
ctrl2.invoke('j22_fail')
ctrl2.invoke('j23')
def test_propagation_warn_only_nested_serial_serial_continue(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=2)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=3)
api.job('j23', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=4)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.serial() as ctrl2:
ctrl2.invoke('j21')
with ctrl2.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl3:
ctrl3.invoke('j22_fail')
ctrl2.invoke('j23')
def test_propagation_warn_only_nested_parallel_serial_continue(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
api.job('j23', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.serial() as ctrl2:
ctrl2.invoke('j21')
with ctrl2.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl3:
ctrl3.invoke('j22_fail')
ctrl2.invoke('j23')
def test_propagation_warn_only_nested_serial_serial_continue_fail(api_type):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=2)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=3)
api.job('j23_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=4)
with raises(FailedChildJobException):
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.serial() as ctrl2:
ctrl2.invoke('j21')
with ctrl2.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl3:
ctrl3.invoke('j22_fail')
ctrl2.invoke('j23_fail')
def test_propagation_warn_only_nested_parallel_serial_continue_fail(api_type):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
api.job('j23_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
with raises(FailedChildJobsException):
with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.serial() as ctrl2:
ctrl2.invoke('j21')
with ctrl2.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl3:
ctrl3.invoke('j22_fail')
ctrl2.invoke('j23_fail')
| 50.68617
| 149
| 0.683702
| 1,409
| 9,529
| 4.314407
| 0.074521
| 0.046389
| 0.054779
| 0.066952
| 0.909854
| 0.900806
| 0.900806
| 0.89291
| 0.89291
| 0.88633
| 0
| 0.062368
| 0.204114
| 9,529
| 187
| 150
| 50.957219
| 0.739188
| 0.024137
| 0
| 0.769231
| 0
| 0
| 0.036364
| 0
| 0
| 0
| 0
| 0
| 0.006993
| 1
| 0.06993
| false
| 0
| 0.027972
| 0
| 0.097902
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62691bca9ef85cd31b36e1e397faed73d833bd04
| 2,992
|
py
|
Python
|
tests/test_year_2018.py
|
l0pht511/jpholiday
|
083145737b61fad3420c066968c4329d17dc3baf
|
[
"MIT"
] | 179
|
2017-10-05T12:41:10.000Z
|
2022-03-24T22:18:25.000Z
|
tests/test_year_2018.py
|
l0pht511/jpholiday
|
083145737b61fad3420c066968c4329d17dc3baf
|
[
"MIT"
] | 17
|
2018-10-23T00:51:13.000Z
|
2021-11-22T11:40:06.000Z
|
tests/test_year_2018.py
|
l0pht511/jpholiday
|
083145737b61fad3420c066968c4329d17dc3baf
|
[
"MIT"
] | 17
|
2018-10-19T11:13:07.000Z
|
2022-01-29T08:05:56.000Z
|
# coding: utf-8
import datetime
import unittest
import jpholiday
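# Verifies jpholiday's holiday names and per-month/per-year holiday counts for calendar year 2018.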
class TestYear2018(unittest.TestCase):
def test_holiday(self):
"""
        2018 public holidays
"""
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 1, 1)), '元日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 1, 8)), '成人の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 2, 11)), '建国記念の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 2, 12)), '建国記念の日 振替休日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 3, 21)), '春分の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 4, 29)), '昭和の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 4, 30)), '昭和の日 振替休日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 5, 3)), '憲法記念日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 5, 4)), 'みどりの日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 5, 5)), 'こどもの日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 7, 16)), '海の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 8, 11)), '山の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 9, 17)), '敬老の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 9, 23)), '秋分の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 9, 24)), '秋分の日 振替休日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 10, 8)), '体育の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 11, 3)), '文化の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 11, 23)), '勤労感謝の日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 12, 23)), '天皇誕生日')
self.assertEqual(jpholiday.is_holiday_name(datetime.date(2018, 12, 24)), '天皇誕生日 振替休日')
def test_count_month(self):
"""
        Number of holidays per month in 2018
"""
self.assertEqual(len(jpholiday.month_holidays(2018, 1)), 2)
self.assertEqual(len(jpholiday.month_holidays(2018, 2)), 2)
self.assertEqual(len(jpholiday.month_holidays(2018, 3)), 1)
self.assertEqual(len(jpholiday.month_holidays(2018, 4)), 2)
self.assertEqual(len(jpholiday.month_holidays(2018, 5)), 3)
self.assertEqual(len(jpholiday.month_holidays(2018, 6)), 0)
self.assertEqual(len(jpholiday.month_holidays(2018, 7)), 1)
self.assertEqual(len(jpholiday.month_holidays(2018, 8)), 1)
self.assertEqual(len(jpholiday.month_holidays(2018, 9)), 3)
self.assertEqual(len(jpholiday.month_holidays(2018, 10)), 1)
self.assertEqual(len(jpholiday.month_holidays(2018, 11)), 2)
self.assertEqual(len(jpholiday.month_holidays(2018, 12)), 2)
def test_count_year(self):
"""
        Total number of holidays in 2018
"""
self.assertEqual(len(jpholiday.year_holidays(2018)), 20)
| 53.428571
| 94
| 0.684492
| 390
| 2,992
| 5.102564
| 0.164103
| 0.248744
| 0.241206
| 0.261307
| 0.819095
| 0.819095
| 0.819095
| 0.774874
| 0.548744
| 0.495477
| 0
| 0.094741
| 0.167447
| 2,992
| 55
| 95
| 54.4
| 0.704135
| 0.013703
| 0
| 0
| 0
| 0
| 0.03714
| 0
| 0
| 0
| 0
| 0
| 0.825
| 1
| 0.075
| false
| 0
| 0.075
| 0
| 0.175
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
658e190370f91502c18753af3de961237b0e0150
| 129
|
py
|
Python
|
model/__init__.py
|
Pearl-UTexas/DUST-net
|
debea05a04e9340109176c7803909b50f84892ba
|
[
"MIT"
] | null | null | null |
model/__init__.py
|
Pearl-UTexas/DUST-net
|
debea05a04e9340109176c7803909b50f84892ba
|
[
"MIT"
] | null | null | null |
model/__init__.py
|
Pearl-UTexas/DUST-net
|
debea05a04e9340109176c7803909b50f84892ba
|
[
"MIT"
] | null | null | null |
from .von_mises_stiefel import *
from .von_mises_fisher import *
from .model import *
from .metrics import *
from .loss import *
| 21.5
| 32
| 0.767442
| 19
| 129
| 5
| 0.473684
| 0.421053
| 0.252632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155039
| 129
| 5
| 33
| 25.8
| 0.87156
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
65c2afb8b2d130681f854965474e19205bdcd378
| 5,087
|
py
|
Python
|
tests/test_observable/test_dowhile.py
|
yutiansut/RxPY
|
c3bbba77f9ebd7706c949141725e220096deabd4
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2018-11-16T09:07:13.000Z
|
2018-11-16T09:07:13.000Z
|
tests/test_observable/test_dowhile.py
|
yutiansut/RxPY
|
c3bbba77f9ebd7706c949141725e220096deabd4
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
tests/test_observable/test_dowhile.py
|
yutiansut/RxPY
|
c3bbba77f9ebd7706c949141725e220096deabd4
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-05-08T08:23:08.000Z
|
2020-05-08T08:23:08.000Z
|
import unittest
from rx.testing import TestScheduler, ReactiveTest
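# do_while re-subscribes to the source while the supplied condition returns True; the cases below
# cover always-false, always-true, error, infinite, sometimes-true, and throwing conditions.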
class TestDoWhile(ReactiveTest, unittest.TestCase):
def test_dowhile_always_false(self):
scheduler = TestScheduler()
xs = scheduler.create_cold_observable(
self.on_next(50, 1),
self.on_next(100, 2),
self.on_next(150, 3),
self.on_next(200, 4),
self.on_completed(250))
def create():
return xs.do_while(lambda _: False)
results = scheduler.start(create=create)
assert results.messages == [
self.on_next(250, 1),
self.on_next(300, 2),
self.on_next(350, 3),
self.on_next(400, 4),
self.on_completed(450)]
assert xs.subscriptions == [self.subscribe(200, 450)]
def test_dowhile_always_true(self):
scheduler = TestScheduler()
xs = scheduler.create_cold_observable(
self.on_next(50, 1),
self.on_next(100, 2),
self.on_next(150, 3),
self.on_next(200, 4),
self.on_completed(250))
def create():
return xs.do_while(lambda _: True)
results = scheduler.start(create=create)
assert results.messages == [
self.on_next(250, 1),
self.on_next(300, 2),
self.on_next(350, 3),
self.on_next(400, 4),
self.on_next(500, 1),
self.on_next(550, 2),
self.on_next(600, 3),
self.on_next(650, 4),
self.on_next(750, 1),
self.on_next(800, 2),
self.on_next(850, 3),
self.on_next(900, 4)]
assert xs.subscriptions == [
self.subscribe(200, 450),
self.subscribe(450, 700),
self.subscribe(700, 950),
self.subscribe(950, 1000)]
def test_dowhile_always_true_on_error(self):
ex = 'ex'
scheduler = TestScheduler()
xs = scheduler.create_cold_observable(self.on_error(50, ex))
def create():
return xs.do_while(lambda _: True)
results = scheduler.start(create=create)
assert results.messages == [self.on_error(250, ex)]
assert xs.subscriptions == [self.subscribe(200, 250)]
def test_dowhile_always_true_infinite(self):
scheduler = TestScheduler()
xs = scheduler.create_cold_observable(self.on_next(50, 1))
def create():
return xs.do_while(lambda _: True)
results = scheduler.start(create=create)
assert results.messages == [
self.on_next(250, 1)]
assert xs.subscriptions == [self.subscribe(200, 1000)]
def test_dowhile_sometimes_true(self):
scheduler = TestScheduler()
xs = scheduler.create_cold_observable(
self.on_next(50, 1),
self.on_next(100, 2),
self.on_next(150, 3),
self.on_next(200, 4),
self.on_completed(250))
n = [0]
def create():
def condition(x):
n[0] += 1
return n[0]<3
return xs.do_while(condition)
results = scheduler.start(create=create)
assert results.messages == [
self.on_next(250, 1),
self.on_next(300, 2),
self.on_next(350, 3),
self.on_next(400, 4),
self.on_next(500, 1),
self.on_next(550, 2),
self.on_next(600, 3),
self.on_next(650, 4),
self.on_next(750, 1),
self.on_next(800, 2),
self.on_next(850, 3),
self.on_next(900, 4),
self.on_completed(950)]
assert xs.subscriptions == [
self.subscribe(200, 450),
self.subscribe(450, 700),
self.subscribe(700, 950)]
def test_dowhile_sometimes_throws(self):
ex = 'ex'
scheduler = TestScheduler()
xs = scheduler.create_cold_observable(
self.on_next(50, 1),
self.on_next(100, 2),
self.on_next(150, 3),
self.on_next(200, 4),
self.on_completed(250))
n = [0]
def create():
def condition(x):
n[0] += 1
if n[0]<3:
return True
else:
raise Exception(ex)
return xs.do_while(condition)
results = scheduler.start(create=create)
assert results.messages == [
self.on_next(250, 1),
self.on_next(300, 2),
self.on_next(350, 3),
self.on_next(400, 4),
self.on_next(500, 1),
self.on_next(550, 2),
self.on_next(600, 3),
self.on_next(650, 4),
self.on_next(750, 1),
self.on_next(800, 2),
self.on_next(850, 3),
self.on_next(900, 4),
self.on_error(950, ex)]
assert xs.subscriptions == [
self.subscribe(200, 450),
self.subscribe(450, 700),
self.subscribe(700, 950)]
| 31.79375
| 68
| 0.525654
| 621
| 5,087
| 4.128824
| 0.115942
| 0.156786
| 0.226209
| 0.060062
| 0.875585
| 0.849064
| 0.834633
| 0.803042
| 0.803042
| 0.803042
| 0
| 0.104478
| 0.354629
| 5,087
| 159
| 69
| 31.993711
| 0.676515
| 0
| 0
| 0.797101
| 0
| 0
| 0.000786
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 1
| 0.101449
| false
| 0
| 0.014493
| 0.028986
| 0.181159
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
65cff554030214e04d5a8a2df9a42dced600b89e
| 11,487
|
py
|
Python
|
test/nn/test_nonlinearities_fliprotations.py
|
steven-lang/e2cnn
|
48f49760766ec958b52d0dd7b02483886dfa2096
|
[
"BSD-3-Clause"
] | 356
|
2019-11-22T10:37:22.000Z
|
2022-03-25T14:42:45.000Z
|
test/nn/test_nonlinearities_fliprotations.py
|
steven-lang/e2cnn
|
48f49760766ec958b52d0dd7b02483886dfa2096
|
[
"BSD-3-Clause"
] | 52
|
2020-01-20T16:51:36.000Z
|
2022-03-31T21:40:19.000Z
|
test/nn/test_nonlinearities_fliprotations.py
|
steven-lang/e2cnn
|
48f49760766ec958b52d0dd7b02483886dfa2096
|
[
"BSD-3-Clause"
] | 48
|
2019-12-11T09:29:30.000Z
|
2022-03-18T17:51:55.000Z
|
import unittest
from unittest import TestCase
from e2cnn.nn import *
from e2cnn.gspaces import *
import random
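# Equivariance checks for the nonlinearity modules (norm, pointwise, gated, induced, concatenated)
# on dihedral FlipRot2dOnR2(N) and O(2) field types, plus the expected AssertionErrors for
# representations that do not support a given nonlinearity.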
class TestNonLinearitiesFlipRotations(TestCase):
def test_dihedral_norm_relu(self):
N = 8
g = FlipRot2dOnR2(N)
r = FieldType(g, list(g.representations.values()) * 4)
nnl = NormNonLinearity(r, function='n_relu')
nnl.check_equivariance()
def test_dihedral_norm_sigmoid(self):
N = 8
g = FlipRot2dOnR2(N)
r = FieldType(g, list(g.representations.values()) * 4)
nnl = NormNonLinearity(r, function='n_sigmoid')
nnl.check_equivariance()
def test_dihedral_pointwise_relu(self):
N = 8
g = FlipRot2dOnR2(N)
reprs = [r for r in g.representations.values() if 'pointwise' in r.supported_nonlinearities]
r = FieldType(g, reprs)
nnl = PointwiseNonLinearity(r, function='p_relu')
nnl.check_equivariance()
def test_dihedral_pointwise_sigmoid(self):
N = 8
g = FlipRot2dOnR2(N)
reprs = [r for r in g.representations.values() if 'pointwise' in r.supported_nonlinearities]
r = FieldType(g, reprs)
nnl = PointwiseNonLinearity(r, function='p_sigmoid')
nnl.check_equivariance()
def test_dihedral_gated_one_input_shuffled_gated(self):
N = 8
g = FlipRot2dOnR2(N)
reprs = [r for r in g.representations.values() if 'gated' in r.supported_nonlinearities] * 3
ngates = len(reprs)
reprs += [g.trivial_repr] * ngates
gates = ['gated'] * ngates + ['gate'] * ngates
r = FieldType(g, reprs)
nnl = GatedNonLinearity1(r, gates=gates)
nnl.check_equivariance()
def test_dihedral_gated_one_input_sorted_gated(self):
N = 8
g = FlipRot2dOnR2(N)
reprs = [r for r in g.representations.values() if 'gated' in r.supported_nonlinearities] * 3
r = FieldType(g, reprs).sorted()
ngates = len(r)
reprs = [g.trivial_repr] * ngates
gates = ['gated'] * ngates + ['gate'] * ngates
r = r + FieldType(g, reprs)
nnl = GatedNonLinearity1(r, gates=gates)
nnl.check_equivariance()
def test_dihedral_gated_one_input_all_shuffled(self):
N = 8
g = FlipRot2dOnR2(N)
reprs = [r for r in g.representations.values() if 'gated' in r.supported_nonlinearities] * 2
ngates = len(reprs)
reprs += [g.trivial_repr] * ngates
gates = ['gated'] * ngates + ['gate'] * ngates
t = list(zip(reprs, gates))
random.shuffle(t)
reprs, gates = zip(*t)
r = FieldType(g, reprs)
nnl = GatedNonLinearity1(r, gates=gates)
nnl.check_equivariance()
def test_dihedral_gated_two_inputs_shuffled_gated(self):
N = 8
g = FlipRot2dOnR2(N)
gated = [r for r in g.representations.values() if 'gated' in r.supported_nonlinearities] * 3
ngates = len(gated)
gates = [g.trivial_repr] * ngates
gates = FieldType(g, gates)
gated = FieldType(g, gated)
nnl = GatedNonLinearity2((gates, gated))
nnl.check_equivariance()
def test_dihedral_gated_two_inputs_sorted_gated(self):
N = 8
g = FlipRot2dOnR2(N)
gated = [r for r in g.representations.values() if 'gated' in r.supported_nonlinearities] * 2
ngates = len(gated)
gates = [g.trivial_repr] * ngates
gates = FieldType(g, gates)
gated = FieldType(g, gated).sorted()
nnl = GatedNonLinearity2((gates, gated))
nnl.check_equivariance()
def test_dihedral_concat_relu(self):
N = 8
g = FlipRot2dOnR2(N)
reprs = [r for r in g.representations.values() if 'concatenated' in r.supported_nonlinearities]
for rep in reprs:
r = FieldType(g, [rep])
nnl = ConcatenatedNonLinearity(r, function='c_relu')
nnl.check_equivariance()
def test_dihedral_induced_norm_relu(self):
N = 9
g = FlipRot2dOnR2(N)
sg_id = (None, N)
so2, _, _ = g.fibergroup.subgroup(sg_id)
r = FieldType(g, [g.induced_repr(sg_id, so2.irrep(k)) for k in range(1, int(N // 2))] * 4).sorted()
nnl = InducedNormNonLinearity(r, function='n_relu')
nnl.check_equivariance()
def test_o2_induced_norm_relu(self):
g = FlipRot2dOnR2(-1, 10)
sg_id = (None, -1)
so2, _, _ = g.fibergroup.subgroup(sg_id)
r = FieldType(g, [g.induced_repr(sg_id, so2.irrep(k)) for k in range(1, 7)] * 4).sorted()
nnl = InducedNormNonLinearity(r, function='n_relu')
nnl.check_equivariance()
def test_o2_induced_gated(self):
g = FlipRot2dOnR2(-1, 10)
sg_id = (None, -1)
so2, _, _ = g.fibergroup.subgroup(sg_id)
reprs = [g.induced_repr(sg_id, so2.irrep(k)) for k in range(1, 3)] * 5
ngates = len(reprs)
reprs += [g.induced_repr(sg_id, so2.trivial_representation)] * ngates
gates = ['gated'] * ngates + ['gate'] * ngates
r = FieldType(g, reprs)
nnl = InducedGatedNonLinearity1(r, gates=gates)
nnl.check_equivariance()
def test_o2_norm_relu(self):
g = FlipRot2dOnR2(-1, 10)
r = FieldType(g, list(g.representations.values()) * 4)
nnl = NormNonLinearity(r, function='n_relu')
nnl.check_equivariance()
def test_o2_norm_sigmoid(self):
g = FlipRot2dOnR2(-1, 10)
r = FieldType(g, list(g.representations.values()) * 4)
nnl = NormNonLinearity(r, function='n_sigmoid')
nnl.check_equivariance()
def test_o2_pointwise_relu(self):
g = FlipRot2dOnR2(-1, 10)
reprs = [r for r in g.representations.values() if 'pointwise' in r.supported_nonlinearities]
r = FieldType(g, reprs)
nnl = PointwiseNonLinearity(r, function='p_relu')
nnl.check_equivariance()
def test_o2_pointwise_sigmoid(self):
g = FlipRot2dOnR2(-1, 10)
reprs = [r for r in g.representations.values() if 'pointwise' in r.supported_nonlinearities]
r = FieldType(g, reprs)
nnl = PointwiseNonLinearity(r, function='p_sigmoid')
nnl.check_equivariance()
def test_o2_gated_one_input_shuffled_gated(self):
g = FlipRot2dOnR2(-1, 10)
reprs = [r for r in g.representations.values() if 'gated' in r.supported_nonlinearities] * 3
ngates = len(reprs)
reprs += [g.trivial_repr] * ngates
gates = ['gated'] * ngates + ['gate'] * ngates
r = FieldType(g, reprs)
nnl = GatedNonLinearity1(r, gates=gates)
nnl.check_equivariance()
def test_o2_gated_one_input_sorted_gated(self):
g = FlipRot2dOnR2(-1, 10)
reprs = [r for r in g.representations.values() if 'gated' in r.supported_nonlinearities] * 2
r = FieldType(g, reprs).sorted()
ngates = len(r)
reprs = [g.trivial_repr] * ngates
gates = ['gated'] * ngates + ['gate'] * ngates
r = r + FieldType(g, reprs)
nnl = GatedNonLinearity1(r, gates=gates)
nnl.check_equivariance()
def test_o2_gated_one_input_all_shuffled(self):
g = FlipRot2dOnR2(-1, 10)
reprs = [r for r in g.representations.values() if 'gated' in r.supported_nonlinearities] * 3
ngates = len(reprs)
reprs += [g.trivial_repr] * ngates
gates = ['gated'] * ngates + ['gate'] * ngates
t = list(zip(reprs, gates))
random.shuffle(t)
reprs, gates = zip(*t)
r = FieldType(g, reprs)
nnl = GatedNonLinearity1(r, gates=gates)
nnl.check_equivariance()
def test_o2_gated_two_inputs_shuffled_gated(self):
g = FlipRot2dOnR2(-1, 10)
gated = [r for r in g.representations.values() if 'gated' in r.supported_nonlinearities] * 3
ngates = len(gated)
gates = [g.trivial_repr] * ngates
gates = FieldType(g, gates)
gated = FieldType(g, gated)
nnl = GatedNonLinearity2((gates, gated))
nnl.check_equivariance()
def test_o2_gated_two_inputs_sorted_gated(self):
g = FlipRot2dOnR2(-1, 10)
gated = [r for r in g.representations.values() if 'gated' in r.supported_nonlinearities] * 2
ngates = len(gated)
gates = [g.trivial_repr] * ngates
gated = FieldType(g, gated).sorted()
gates = FieldType(g, gates)
nnl = GatedNonLinearity2((gates, gated))
nnl.check_equivariance()
def test_dihedral_gated1_error(self):
N = 8
g = FlipRot2dOnR2(N)
for r in g.representations.values():
if 'gated' not in r.supported_nonlinearities:
r1 = FieldType(g, [r, g.trivial_repr])
gates = ['gated', 'gate']
self.assertRaises(AssertionError, GatedNonLinearity1, r1, gates=gates)
for r in g.representations.values():
if 'gate' not in r.supported_nonlinearities:
r1 = FieldType(g, [g.trivial_repr, r])
gates = ['gated', 'gate']
self.assertRaises(AssertionError, GatedNonLinearity1, r1, gates=gates)
def test_dihedral_gated2_error(self):
N = 8
g = FlipRot2dOnR2(N)
for r in g.representations.values():
if 'gated' not in r.supported_nonlinearities:
gates = FieldType(g, [g.trivial_repr])
gated = FieldType(g, [r])
self.assertRaises(AssertionError, GatedNonLinearity2, (gates, gated))
for r in g.representations.values():
if 'gate' not in r.supported_nonlinearities:
gates = FieldType(g, [r])
gated = FieldType(g, [g.trivial_repr])
self.assertRaises(AssertionError, GatedNonLinearity2, (gates, gated))
def test_dihedral_norm_error(self):
N = 8
g = FlipRot2dOnR2(N)
for r in g.representations.values():
if 'norm' not in r.supported_nonlinearities:
r1 = FieldType(g, [r])
self.assertRaises(AssertionError, NormNonLinearity, r1)
def test_dihedral_pointwise_error(self):
N = 8
g = FlipRot2dOnR2(N)
for r in g.representations.values():
if 'pointwise' not in r.supported_nonlinearities:
r1 = FieldType(g, [r])
self.assertRaises(AssertionError, PointwiseNonLinearity, r1)
def test_dihedral_concat_error(self):
N = 8
g = FlipRot2dOnR2(N)
for r in g.representations.values():
if 'concatenated' not in r.supported_nonlinearities:
r1 = FieldType(g, [r])
self.assertRaises(AssertionError, ConcatenatedNonLinearity, r1)
if __name__ == '__main__':
unittest.main()
| 30.149606
| 107
| 0.572038
| 1,297
| 11,487
| 4.903624
| 0.074017
| 0.058176
| 0.089937
| 0.07956
| 0.908648
| 0.894182
| 0.867138
| 0.836006
| 0.819811
| 0.79827
| 0
| 0.022485
| 0.322451
| 11,487
| 380
| 108
| 30.228947
| 0.794681
| 0
| 0
| 0.758333
| 0
| 0
| 0.026813
| 0
| 0
| 0
| 0
| 0
| 0.029167
| 1
| 0.1125
| false
| 0
| 0.020833
| 0
| 0.1375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65d01a4d1ad87624330d3bcc5a359ecdd7b3f0fa
| 5,880
|
py
|
Python
|
TestModule/AnonymousPlayerTest.py
|
INYEONGKIM/Quattro
|
0fd70b08716f71404f520941791cd314d90de83a
|
[
"MIT"
] | null | null | null |
TestModule/AnonymousPlayerTest.py
|
INYEONGKIM/Quattro
|
0fd70b08716f71404f520941791cd314d90de83a
|
[
"MIT"
] | null | null | null |
TestModule/AnonymousPlayerTest.py
|
INYEONGKIM/Quattro
|
0fd70b08716f71404f520941791cd314d90de83a
|
[
"MIT"
] | null | null | null |
import unittest
from QuattroComponents.Player import Anonymous_player
from QuattroComponents.Card import Card
from TestModule.GetMethodName import get_method_name_decorator
from collections import deque
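# Exercises Anonymous_player.handle_card_change with zero cards, quattro opportunities, and plain
# top-card swaps, asserting the returned card and the player2_changed flag.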
def reset_player_attributes(anonymous: Anonymous_player):
anonymous.player1_changed = False
anonymous.player2_changed = False
class AnonymousPlayerTest(unittest.TestCase):
    # the specific card used as origin_card doesn't matter for these tests
origin_card = Card(number=1, color="green", isOpen=False)
method_names = set()
@get_method_name_decorator
def test_correct_zero_card_change(self):
# Zero idx 0
anonymous = Anonymous_player(user_name="anonymous", user_deck=[
Card(number=0, color="zero", isOpen=False),
Card(number=1, color="red", isOpen=False),
Card(number=2, color="red", isOpen=False)
])
opened_deck = deque([])
return_card = anonymous.handle_card_change(user_name='player2', origin_card=self.origin_card, opened_deck=opened_deck)
self.assertEqual(return_card.number, 0)
self.assertEqual(return_card.color, 'zero')
self.assertTrue(anonymous.player2_changed)
# Zero idx 1
reset_player_attributes(anonymous=anonymous)
self.origin_card.isOpen = False
anonymous = Anonymous_player(user_name="anonymous", user_deck=[
Card(number=1, color="red", isOpen=False),
Card(number=0, color="zero", isOpen=False),
Card(number=2, color="red", isOpen=False)
])
opened_deck = deque([])
return_card = anonymous.handle_card_change(user_name='player2', origin_card=self.origin_card, opened_deck=opened_deck)
self.assertEqual(return_card.number, 0)
self.assertEqual(return_card.color, 'zero')
self.assertTrue(anonymous.player2_changed)
# Zero idx 2
reset_player_attributes(anonymous=anonymous)
self.origin_card.isOpen = False
anonymous = Anonymous_player(user_name="anonymous", user_deck=[
Card(number=1, color="red", isOpen=False),
Card(number=2, color="red", isOpen=False),
Card(number=0, color="zero", isOpen=False)
])
opened_deck = deque([])
return_card = anonymous.handle_card_change(user_name='player2', origin_card=self.origin_card,
opened_deck=opened_deck)
self.assertEqual(return_card.number, 0)
self.assertEqual(return_card.color, 'zero')
self.assertTrue(anonymous.player2_changed)
# with opened_deck
reset_player_attributes(anonymous=anonymous)
self.origin_card.isOpen = False
anonymous = Anonymous_player(user_name="anonymous", user_deck=[
Card(number=1, color="red", isOpen=False),
Card(number=2, color="red", isOpen=False),
Card(number=0, color="zero", isOpen=False)
])
opened_deck = deque([Card(number=3, color="blue", isOpen=False)])
return_card = anonymous.handle_card_change(user_name='player2', origin_card=self.origin_card,
opened_deck=opened_deck)
self.assertEqual(return_card.number, 0)
self.assertEqual(return_card.color, 'zero')
self.assertTrue(anonymous.player2_changed)
@get_method_name_decorator
def test_made_quattro_card_change(self):
anonymous = Anonymous_player(user_name="anonymous", user_deck=[
Card(number=1, color="blue", isOpen=False),
Card(number=1, color="yellow", isOpen=False),
Card(number=1, color="red", isOpen=False)
])
opened_deck = deque([
Card(number=6, color="blue", isOpen=True),
Card(number=6, color="red", isOpen=True),
Card(number=6, color="green", isOpen=True)
])
return_card = anonymous.handle_card_change(user_name='player2', origin_card=self.origin_card,
opened_deck=opened_deck)
self.assertEqual(return_card.number, 1)
self.assertEqual(return_card.color, 'yellow')
self.assertTrue(anonymous.player2_changed)
@get_method_name_decorator
def test_top_card_change(self):
anonymous = Anonymous_player(user_name="anonymous", user_deck=[
Card(number=1, color="blue", isOpen=False),
Card(number=2, color="red", isOpen=False),
Card(number=1, color="red", isOpen=False)
])
opened_deck = deque([
Card(number=6, color="blue", isOpen=True),
Card(number=6, color="red", isOpen=True),
Card(number=6, color="green", isOpen=True)
])
return_card = anonymous.handle_card_change(user_name='player2', origin_card=self.origin_card,
opened_deck=opened_deck)
self.assertEqual(return_card.number, 2)
self.assertEqual(return_card.color, 'red')
self.assertTrue(anonymous.player2_changed)
reset_player_attributes(anonymous=anonymous)
self.origin_card.isOpen = False
anonymous.user_deck = [
Card(number=2, color="blue", isOpen=False),
Card(number=2, color="red", isOpen=False),
Card(number=1, color="red", isOpen=False)
]
opened_deck = deque([
Card(number=6, color="blue", isOpen=True),
Card(number=6, color="red", isOpen=True),
Card(number=6, color="green", isOpen=True)
])
return_card = anonymous.handle_card_change(user_name='player2', origin_card=self.origin_card,
opened_deck=opened_deck)
self.assertEqual(return_card.number, 2)
self.assertEqual(return_card.color, 'red')
self.assertTrue(anonymous.player2_changed)
| 43.880597
| 126
| 0.633844
| 679
| 5,880
| 5.27246
| 0.091311
| 0.108939
| 0.06257
| 0.082123
| 0.872905
| 0.848603
| 0.838827
| 0.836034
| 0.836034
| 0.830168
| 0
| 0.013209
| 0.253231
| 5,880
| 133
| 127
| 44.210526
| 0.802095
| 0.012245
| 0
| 0.8125
| 0
| 0
| 0.042916
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 1
| 0.035714
| false
| 0
| 0.044643
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65df788e5e4510c44fcdee2981d1538a1d6e2abd
| 801
|
py
|
Python
|
tests/gcs_test.py
|
rishi1111/vaex
|
b3516201d04e9277b8918dadab9df33a7c83c01a
|
[
"MIT"
] | 1
|
2020-08-31T17:53:01.000Z
|
2020-08-31T17:53:01.000Z
|
tests/gcs_test.py
|
rishi1111/vaex
|
b3516201d04e9277b8918dadab9df33a7c83c01a
|
[
"MIT"
] | null | null | null |
tests/gcs_test.py
|
rishi1111/vaex
|
b3516201d04e9277b8918dadab9df33a7c83c01a
|
[
"MIT"
] | null | null | null |
import vaex
import pytest
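# Opens small test HDF5 files from a public GCS bucket with anonymous access, with and without
# caching, and checks the (possibly masked) column values.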
@pytest.mark.skipif(vaex.utils.devmode, reason='runs too slow when developing')
def test_gcs():
df = vaex.open('gs://vaex-data/testing/xys.hdf5?cache=false&token=anon')
assert df.x.tolist() == [1, 2]
assert df.y.tolist() == [3, 4]
assert df.s.tolist() == ['5', '6']
df = vaex.open('gs://vaex-data/testing/xys.hdf5?cache=true&token=anon')
assert df.x.tolist() == [1, 2]
assert df.y.tolist() == [3, 4]
assert df.s.tolist() == ['5', '6']
@pytest.mark.skipif(vaex.utils.devmode, reason='runs too slow when developing')
def test_gcs_masked():
df = vaex.open('gs://vaex-data/testing/xys-masked.hdf5?cache=false&token=anon')
assert df.x.tolist() == [1, None]
assert df.y.tolist() == [None, 4]
assert df.s.tolist() == ['5', None]
| 33.375
| 83
| 0.627965
| 129
| 801
| 3.875969
| 0.317829
| 0.144
| 0.06
| 0.072
| 0.87
| 0.87
| 0.836
| 0.836
| 0.776
| 0.776
| 0
| 0.026706
| 0.158552
| 801
| 23
| 84
| 34.826087
| 0.715134
| 0
| 0
| 0.444444
| 0
| 0.055556
| 0.28839
| 0.209738
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.111111
| false
| 0
| 0.111111
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0297324475a0f71073a283c42e8668872ade345c
| 38,375
|
py
|
Python
|
sdk/python/pulumi_databricks/permissions.py
|
pulumi/pulumi-databricks
|
43580d4adbd04b72558f368ff0eef3d03432ebc1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_databricks/permissions.py
|
pulumi/pulumi-databricks
|
43580d4adbd04b72558f368ff0eef3d03432ebc1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_databricks/permissions.py
|
pulumi/pulumi-databricks
|
43580d4adbd04b72558f368ff0eef3d03432ebc1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['PermissionsArgs', 'Permissions']
@pulumi.input_type
class PermissionsArgs:
def __init__(__self__, *,
access_controls: pulumi.Input[Sequence[pulumi.Input['PermissionsAccessControlArgs']]],
authorization: Optional[pulumi.Input[str]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
cluster_policy_id: Optional[pulumi.Input[str]] = None,
directory_id: Optional[pulumi.Input[str]] = None,
directory_path: Optional[pulumi.Input[str]] = None,
experiment_id: Optional[pulumi.Input[str]] = None,
instance_pool_id: Optional[pulumi.Input[str]] = None,
job_id: Optional[pulumi.Input[str]] = None,
notebook_id: Optional[pulumi.Input[str]] = None,
notebook_path: Optional[pulumi.Input[str]] = None,
object_type: Optional[pulumi.Input[str]] = None,
registered_model_id: Optional[pulumi.Input[str]] = None,
repo_id: Optional[pulumi.Input[str]] = None,
repo_path: Optional[pulumi.Input[str]] = None,
sql_alert_id: Optional[pulumi.Input[str]] = None,
sql_dashboard_id: Optional[pulumi.Input[str]] = None,
sql_endpoint_id: Optional[pulumi.Input[str]] = None,
sql_query_id: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Permissions resource.
:param pulumi.Input[str] authorization: either [`tokens`](https://docs.databricks.com/administration-guide/access-control/tokens.html) or [`passwords`](https://docs.databricks.com/administration-guide/users-groups/single-sign-on/index.html#configure-password-permission).
:param pulumi.Input[str] cluster_id: cluster id
:param pulumi.Input[str] cluster_policy_id: cluster policy id
:param pulumi.Input[str] directory_id: directory id
:param pulumi.Input[str] directory_path: path of directory
:param pulumi.Input[str] instance_pool_id: instance pool id
:param pulumi.Input[str] job_id: job id
:param pulumi.Input[str] notebook_id: ID of notebook within workspace
:param pulumi.Input[str] notebook_path: path of notebook
:param pulumi.Input[str] object_type: type of permissions.
:param pulumi.Input[str] repo_id: repo id
:param pulumi.Input[str] repo_path: path of databricks repo directory(`/Repos/<username>/...`)
"""
pulumi.set(__self__, "access_controls", access_controls)
if authorization is not None:
pulumi.set(__self__, "authorization", authorization)
if cluster_id is not None:
pulumi.set(__self__, "cluster_id", cluster_id)
if cluster_policy_id is not None:
pulumi.set(__self__, "cluster_policy_id", cluster_policy_id)
if directory_id is not None:
pulumi.set(__self__, "directory_id", directory_id)
if directory_path is not None:
pulumi.set(__self__, "directory_path", directory_path)
if experiment_id is not None:
pulumi.set(__self__, "experiment_id", experiment_id)
if instance_pool_id is not None:
pulumi.set(__self__, "instance_pool_id", instance_pool_id)
if job_id is not None:
pulumi.set(__self__, "job_id", job_id)
if notebook_id is not None:
pulumi.set(__self__, "notebook_id", notebook_id)
if notebook_path is not None:
pulumi.set(__self__, "notebook_path", notebook_path)
if object_type is not None:
pulumi.set(__self__, "object_type", object_type)
if registered_model_id is not None:
pulumi.set(__self__, "registered_model_id", registered_model_id)
if repo_id is not None:
pulumi.set(__self__, "repo_id", repo_id)
if repo_path is not None:
pulumi.set(__self__, "repo_path", repo_path)
if sql_alert_id is not None:
pulumi.set(__self__, "sql_alert_id", sql_alert_id)
if sql_dashboard_id is not None:
pulumi.set(__self__, "sql_dashboard_id", sql_dashboard_id)
if sql_endpoint_id is not None:
pulumi.set(__self__, "sql_endpoint_id", sql_endpoint_id)
if sql_query_id is not None:
pulumi.set(__self__, "sql_query_id", sql_query_id)
@property
@pulumi.getter(name="accessControls")
def access_controls(self) -> pulumi.Input[Sequence[pulumi.Input['PermissionsAccessControlArgs']]]:
return pulumi.get(self, "access_controls")
@access_controls.setter
def access_controls(self, value: pulumi.Input[Sequence[pulumi.Input['PermissionsAccessControlArgs']]]):
pulumi.set(self, "access_controls", value)
@property
@pulumi.getter
def authorization(self) -> Optional[pulumi.Input[str]]:
"""
either [`tokens`](https://docs.databricks.com/administration-guide/access-control/tokens.html) or [`passwords`](https://docs.databricks.com/administration-guide/users-groups/single-sign-on/index.html#configure-password-permission).
"""
return pulumi.get(self, "authorization")
@authorization.setter
def authorization(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "authorization", value)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> Optional[pulumi.Input[str]]:
"""
cluster id
"""
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter(name="clusterPolicyId")
def cluster_policy_id(self) -> Optional[pulumi.Input[str]]:
"""
cluster policy id
"""
return pulumi.get(self, "cluster_policy_id")
@cluster_policy_id.setter
def cluster_policy_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_policy_id", value)
@property
@pulumi.getter(name="directoryId")
def directory_id(self) -> Optional[pulumi.Input[str]]:
"""
directory id
"""
return pulumi.get(self, "directory_id")
@directory_id.setter
def directory_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "directory_id", value)
@property
@pulumi.getter(name="directoryPath")
def directory_path(self) -> Optional[pulumi.Input[str]]:
"""
path of directory
"""
return pulumi.get(self, "directory_path")
@directory_path.setter
def directory_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "directory_path", value)
@property
@pulumi.getter(name="experimentId")
def experiment_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "experiment_id")
@experiment_id.setter
def experiment_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "experiment_id", value)
@property
@pulumi.getter(name="instancePoolId")
def instance_pool_id(self) -> Optional[pulumi.Input[str]]:
"""
instance pool id
"""
return pulumi.get(self, "instance_pool_id")
@instance_pool_id.setter
def instance_pool_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance_pool_id", value)
@property
@pulumi.getter(name="jobId")
def job_id(self) -> Optional[pulumi.Input[str]]:
"""
job id
"""
return pulumi.get(self, "job_id")
@job_id.setter
def job_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "job_id", value)
@property
@pulumi.getter(name="notebookId")
def notebook_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of notebook within workspace
"""
return pulumi.get(self, "notebook_id")
@notebook_id.setter
def notebook_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "notebook_id", value)
@property
@pulumi.getter(name="notebookPath")
def notebook_path(self) -> Optional[pulumi.Input[str]]:
"""
path of notebook
"""
return pulumi.get(self, "notebook_path")
@notebook_path.setter
def notebook_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "notebook_path", value)
@property
@pulumi.getter(name="objectType")
def object_type(self) -> Optional[pulumi.Input[str]]:
"""
type of permissions.
"""
return pulumi.get(self, "object_type")
@object_type.setter
def object_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "object_type", value)
@property
@pulumi.getter(name="registeredModelId")
def registered_model_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "registered_model_id")
@registered_model_id.setter
def registered_model_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "registered_model_id", value)
@property
@pulumi.getter(name="repoId")
def repo_id(self) -> Optional[pulumi.Input[str]]:
"""
repo id
"""
return pulumi.get(self, "repo_id")
@repo_id.setter
def repo_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "repo_id", value)
@property
@pulumi.getter(name="repoPath")
def repo_path(self) -> Optional[pulumi.Input[str]]:
"""
path of databricks repo directory(`/Repos/<username>/...`)
"""
return pulumi.get(self, "repo_path")
@repo_path.setter
def repo_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "repo_path", value)
@property
@pulumi.getter(name="sqlAlertId")
def sql_alert_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "sql_alert_id")
@sql_alert_id.setter
def sql_alert_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sql_alert_id", value)
@property
@pulumi.getter(name="sqlDashboardId")
def sql_dashboard_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "sql_dashboard_id")
@sql_dashboard_id.setter
def sql_dashboard_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sql_dashboard_id", value)
@property
@pulumi.getter(name="sqlEndpointId")
def sql_endpoint_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "sql_endpoint_id")
@sql_endpoint_id.setter
def sql_endpoint_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sql_endpoint_id", value)
@property
@pulumi.getter(name="sqlQueryId")
def sql_query_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "sql_query_id")
@sql_query_id.setter
def sql_query_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sql_query_id", value)
@pulumi.input_type
class _PermissionsState:
def __init__(__self__, *,
access_controls: Optional[pulumi.Input[Sequence[pulumi.Input['PermissionsAccessControlArgs']]]] = None,
authorization: Optional[pulumi.Input[str]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
cluster_policy_id: Optional[pulumi.Input[str]] = None,
directory_id: Optional[pulumi.Input[str]] = None,
directory_path: Optional[pulumi.Input[str]] = None,
experiment_id: Optional[pulumi.Input[str]] = None,
instance_pool_id: Optional[pulumi.Input[str]] = None,
job_id: Optional[pulumi.Input[str]] = None,
notebook_id: Optional[pulumi.Input[str]] = None,
notebook_path: Optional[pulumi.Input[str]] = None,
object_type: Optional[pulumi.Input[str]] = None,
registered_model_id: Optional[pulumi.Input[str]] = None,
repo_id: Optional[pulumi.Input[str]] = None,
repo_path: Optional[pulumi.Input[str]] = None,
sql_alert_id: Optional[pulumi.Input[str]] = None,
sql_dashboard_id: Optional[pulumi.Input[str]] = None,
sql_endpoint_id: Optional[pulumi.Input[str]] = None,
sql_query_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Permissions resources.
:param pulumi.Input[str] authorization: either [`tokens`](https://docs.databricks.com/administration-guide/access-control/tokens.html) or [`passwords`](https://docs.databricks.com/administration-guide/users-groups/single-sign-on/index.html#configure-password-permission).
:param pulumi.Input[str] cluster_id: cluster id
:param pulumi.Input[str] cluster_policy_id: cluster policy id
:param pulumi.Input[str] directory_id: directory id
:param pulumi.Input[str] directory_path: path of directory
:param pulumi.Input[str] instance_pool_id: instance pool id
:param pulumi.Input[str] job_id: job id
:param pulumi.Input[str] notebook_id: ID of notebook within workspace
:param pulumi.Input[str] notebook_path: path of notebook
:param pulumi.Input[str] object_type: type of permissions.
:param pulumi.Input[str] repo_id: repo id
:param pulumi.Input[str] repo_path: path of databricks repo directory(`/Repos/<username>/...`)
"""
if access_controls is not None:
pulumi.set(__self__, "access_controls", access_controls)
if authorization is not None:
pulumi.set(__self__, "authorization", authorization)
if cluster_id is not None:
pulumi.set(__self__, "cluster_id", cluster_id)
if cluster_policy_id is not None:
pulumi.set(__self__, "cluster_policy_id", cluster_policy_id)
if directory_id is not None:
pulumi.set(__self__, "directory_id", directory_id)
if directory_path is not None:
pulumi.set(__self__, "directory_path", directory_path)
if experiment_id is not None:
pulumi.set(__self__, "experiment_id", experiment_id)
if instance_pool_id is not None:
pulumi.set(__self__, "instance_pool_id", instance_pool_id)
if job_id is not None:
pulumi.set(__self__, "job_id", job_id)
if notebook_id is not None:
pulumi.set(__self__, "notebook_id", notebook_id)
if notebook_path is not None:
pulumi.set(__self__, "notebook_path", notebook_path)
if object_type is not None:
pulumi.set(__self__, "object_type", object_type)
if registered_model_id is not None:
pulumi.set(__self__, "registered_model_id", registered_model_id)
if repo_id is not None:
pulumi.set(__self__, "repo_id", repo_id)
if repo_path is not None:
pulumi.set(__self__, "repo_path", repo_path)
if sql_alert_id is not None:
pulumi.set(__self__, "sql_alert_id", sql_alert_id)
if sql_dashboard_id is not None:
pulumi.set(__self__, "sql_dashboard_id", sql_dashboard_id)
if sql_endpoint_id is not None:
pulumi.set(__self__, "sql_endpoint_id", sql_endpoint_id)
if sql_query_id is not None:
pulumi.set(__self__, "sql_query_id", sql_query_id)
@property
@pulumi.getter(name="accessControls")
def access_controls(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PermissionsAccessControlArgs']]]]:
return pulumi.get(self, "access_controls")
@access_controls.setter
def access_controls(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PermissionsAccessControlArgs']]]]):
pulumi.set(self, "access_controls", value)
@property
@pulumi.getter
def authorization(self) -> Optional[pulumi.Input[str]]:
"""
either [`tokens`](https://docs.databricks.com/administration-guide/access-control/tokens.html) or [`passwords`](https://docs.databricks.com/administration-guide/users-groups/single-sign-on/index.html#configure-password-permission).
"""
return pulumi.get(self, "authorization")
@authorization.setter
def authorization(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "authorization", value)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> Optional[pulumi.Input[str]]:
"""
cluster id
"""
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter(name="clusterPolicyId")
def cluster_policy_id(self) -> Optional[pulumi.Input[str]]:
"""
cluster policy id
"""
return pulumi.get(self, "cluster_policy_id")
@cluster_policy_id.setter
def cluster_policy_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_policy_id", value)
@property
@pulumi.getter(name="directoryId")
def directory_id(self) -> Optional[pulumi.Input[str]]:
"""
directory id
"""
return pulumi.get(self, "directory_id")
@directory_id.setter
def directory_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "directory_id", value)
@property
@pulumi.getter(name="directoryPath")
def directory_path(self) -> Optional[pulumi.Input[str]]:
"""
path of directory
"""
return pulumi.get(self, "directory_path")
@directory_path.setter
def directory_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "directory_path", value)
@property
@pulumi.getter(name="experimentId")
def experiment_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "experiment_id")
@experiment_id.setter
def experiment_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "experiment_id", value)
@property
@pulumi.getter(name="instancePoolId")
def instance_pool_id(self) -> Optional[pulumi.Input[str]]:
"""
instance pool id
"""
return pulumi.get(self, "instance_pool_id")
@instance_pool_id.setter
def instance_pool_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance_pool_id", value)
@property
@pulumi.getter(name="jobId")
def job_id(self) -> Optional[pulumi.Input[str]]:
"""
job id
"""
return pulumi.get(self, "job_id")
@job_id.setter
def job_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "job_id", value)
@property
@pulumi.getter(name="notebookId")
def notebook_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of notebook within workspace
"""
return pulumi.get(self, "notebook_id")
@notebook_id.setter
def notebook_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "notebook_id", value)
@property
@pulumi.getter(name="notebookPath")
def notebook_path(self) -> Optional[pulumi.Input[str]]:
"""
path of notebook
"""
return pulumi.get(self, "notebook_path")
@notebook_path.setter
def notebook_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "notebook_path", value)
@property
@pulumi.getter(name="objectType")
def object_type(self) -> Optional[pulumi.Input[str]]:
"""
type of permissions.
"""
return pulumi.get(self, "object_type")
@object_type.setter
def object_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "object_type", value)
@property
@pulumi.getter(name="registeredModelId")
def registered_model_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "registered_model_id")
@registered_model_id.setter
def registered_model_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "registered_model_id", value)
@property
@pulumi.getter(name="repoId")
def repo_id(self) -> Optional[pulumi.Input[str]]:
"""
repo id
"""
return pulumi.get(self, "repo_id")
@repo_id.setter
def repo_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "repo_id", value)
@property
@pulumi.getter(name="repoPath")
def repo_path(self) -> Optional[pulumi.Input[str]]:
"""
path of Databricks repo directory (`/Repos/<username>/...`)
"""
return pulumi.get(self, "repo_path")
@repo_path.setter
def repo_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "repo_path", value)
@property
@pulumi.getter(name="sqlAlertId")
def sql_alert_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "sql_alert_id")
@sql_alert_id.setter
def sql_alert_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sql_alert_id", value)
@property
@pulumi.getter(name="sqlDashboardId")
def sql_dashboard_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "sql_dashboard_id")
@sql_dashboard_id.setter
def sql_dashboard_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sql_dashboard_id", value)
@property
@pulumi.getter(name="sqlEndpointId")
def sql_endpoint_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "sql_endpoint_id")
@sql_endpoint_id.setter
def sql_endpoint_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sql_endpoint_id", value)
@property
@pulumi.getter(name="sqlQueryId")
def sql_query_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "sql_query_id")
@sql_query_id.setter
def sql_query_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sql_query_id", value)
class Permissions(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
access_controls: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PermissionsAccessControlArgs']]]]] = None,
authorization: Optional[pulumi.Input[str]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
cluster_policy_id: Optional[pulumi.Input[str]] = None,
directory_id: Optional[pulumi.Input[str]] = None,
directory_path: Optional[pulumi.Input[str]] = None,
experiment_id: Optional[pulumi.Input[str]] = None,
instance_pool_id: Optional[pulumi.Input[str]] = None,
job_id: Optional[pulumi.Input[str]] = None,
notebook_id: Optional[pulumi.Input[str]] = None,
notebook_path: Optional[pulumi.Input[str]] = None,
object_type: Optional[pulumi.Input[str]] = None,
registered_model_id: Optional[pulumi.Input[str]] = None,
repo_id: Optional[pulumi.Input[str]] = None,
repo_path: Optional[pulumi.Input[str]] = None,
sql_alert_id: Optional[pulumi.Input[str]] = None,
sql_dashboard_id: Optional[pulumi.Input[str]] = None,
sql_endpoint_id: Optional[pulumi.Input[str]] = None,
sql_query_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
## Import
The resource permissions can be imported using the object id, e.g.:
```sh
$ pulumi import databricks:index/permissions:Permissions this /<object type>/<object id>
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] authorization: either [`tokens`](https://docs.databricks.com/administration-guide/access-control/tokens.html) or [`passwords`](https://docs.databricks.com/administration-guide/users-groups/single-sign-on/index.html#configure-password-permission).
:param pulumi.Input[str] cluster_id: cluster id
:param pulumi.Input[str] cluster_policy_id: cluster policy id
:param pulumi.Input[str] directory_id: directory id
:param pulumi.Input[str] directory_path: path of directory
:param pulumi.Input[str] instance_pool_id: instance pool id
:param pulumi.Input[str] job_id: job id
:param pulumi.Input[str] notebook_id: ID of notebook within workspace
:param pulumi.Input[str] notebook_path: path of notebook
:param pulumi.Input[str] object_type: type of permissions.
:param pulumi.Input[str] repo_id: repo id
:param pulumi.Input[str] repo_path: path of Databricks repo directory (`/Repos/<username>/...`)
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: PermissionsArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
## Import
The resource permissions can be imported using the object id, e.g.:
```sh
$ pulumi import databricks:index/permissions:Permissions this /<object type>/<object id>
```
:param str resource_name: The name of the resource.
:param PermissionsArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PermissionsArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
access_controls: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PermissionsAccessControlArgs']]]]] = None,
authorization: Optional[pulumi.Input[str]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
cluster_policy_id: Optional[pulumi.Input[str]] = None,
directory_id: Optional[pulumi.Input[str]] = None,
directory_path: Optional[pulumi.Input[str]] = None,
experiment_id: Optional[pulumi.Input[str]] = None,
instance_pool_id: Optional[pulumi.Input[str]] = None,
job_id: Optional[pulumi.Input[str]] = None,
notebook_id: Optional[pulumi.Input[str]] = None,
notebook_path: Optional[pulumi.Input[str]] = None,
object_type: Optional[pulumi.Input[str]] = None,
registered_model_id: Optional[pulumi.Input[str]] = None,
repo_id: Optional[pulumi.Input[str]] = None,
repo_path: Optional[pulumi.Input[str]] = None,
sql_alert_id: Optional[pulumi.Input[str]] = None,
sql_dashboard_id: Optional[pulumi.Input[str]] = None,
sql_endpoint_id: Optional[pulumi.Input[str]] = None,
sql_query_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PermissionsArgs.__new__(PermissionsArgs)
if access_controls is None and not opts.urn:
raise TypeError("Missing required property 'access_controls'")
__props__.__dict__["access_controls"] = access_controls
__props__.__dict__["authorization"] = authorization
__props__.__dict__["cluster_id"] = cluster_id
__props__.__dict__["cluster_policy_id"] = cluster_policy_id
__props__.__dict__["directory_id"] = directory_id
__props__.__dict__["directory_path"] = directory_path
__props__.__dict__["experiment_id"] = experiment_id
__props__.__dict__["instance_pool_id"] = instance_pool_id
__props__.__dict__["job_id"] = job_id
__props__.__dict__["notebook_id"] = notebook_id
__props__.__dict__["notebook_path"] = notebook_path
__props__.__dict__["object_type"] = object_type
__props__.__dict__["registered_model_id"] = registered_model_id
__props__.__dict__["repo_id"] = repo_id
__props__.__dict__["repo_path"] = repo_path
__props__.__dict__["sql_alert_id"] = sql_alert_id
__props__.__dict__["sql_dashboard_id"] = sql_dashboard_id
__props__.__dict__["sql_endpoint_id"] = sql_endpoint_id
__props__.__dict__["sql_query_id"] = sql_query_id
super(Permissions, __self__).__init__(
'databricks:index/permissions:Permissions',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
access_controls: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PermissionsAccessControlArgs']]]]] = None,
authorization: Optional[pulumi.Input[str]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
cluster_policy_id: Optional[pulumi.Input[str]] = None,
directory_id: Optional[pulumi.Input[str]] = None,
directory_path: Optional[pulumi.Input[str]] = None,
experiment_id: Optional[pulumi.Input[str]] = None,
instance_pool_id: Optional[pulumi.Input[str]] = None,
job_id: Optional[pulumi.Input[str]] = None,
notebook_id: Optional[pulumi.Input[str]] = None,
notebook_path: Optional[pulumi.Input[str]] = None,
object_type: Optional[pulumi.Input[str]] = None,
registered_model_id: Optional[pulumi.Input[str]] = None,
repo_id: Optional[pulumi.Input[str]] = None,
repo_path: Optional[pulumi.Input[str]] = None,
sql_alert_id: Optional[pulumi.Input[str]] = None,
sql_dashboard_id: Optional[pulumi.Input[str]] = None,
sql_endpoint_id: Optional[pulumi.Input[str]] = None,
sql_query_id: Optional[pulumi.Input[str]] = None) -> 'Permissions':
"""
Get an existing Permissions resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] authorization: either [`tokens`](https://docs.databricks.com/administration-guide/access-control/tokens.html) or [`passwords`](https://docs.databricks.com/administration-guide/users-groups/single-sign-on/index.html#configure-password-permission).
:param pulumi.Input[str] cluster_id: cluster id
:param pulumi.Input[str] cluster_policy_id: cluster policy id
:param pulumi.Input[str] directory_id: directory id
:param pulumi.Input[str] directory_path: path of directory
:param pulumi.Input[str] instance_pool_id: instance pool id
:param pulumi.Input[str] job_id: job id
:param pulumi.Input[str] notebook_id: ID of notebook within workspace
:param pulumi.Input[str] notebook_path: path of notebook
:param pulumi.Input[str] object_type: type of permissions.
:param pulumi.Input[str] repo_id: repo id
:param pulumi.Input[str] repo_path: path of Databricks repo directory (`/Repos/<username>/...`)
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _PermissionsState.__new__(_PermissionsState)
__props__.__dict__["access_controls"] = access_controls
__props__.__dict__["authorization"] = authorization
__props__.__dict__["cluster_id"] = cluster_id
__props__.__dict__["cluster_policy_id"] = cluster_policy_id
__props__.__dict__["directory_id"] = directory_id
__props__.__dict__["directory_path"] = directory_path
__props__.__dict__["experiment_id"] = experiment_id
__props__.__dict__["instance_pool_id"] = instance_pool_id
__props__.__dict__["job_id"] = job_id
__props__.__dict__["notebook_id"] = notebook_id
__props__.__dict__["notebook_path"] = notebook_path
__props__.__dict__["object_type"] = object_type
__props__.__dict__["registered_model_id"] = registered_model_id
__props__.__dict__["repo_id"] = repo_id
__props__.__dict__["repo_path"] = repo_path
__props__.__dict__["sql_alert_id"] = sql_alert_id
__props__.__dict__["sql_dashboard_id"] = sql_dashboard_id
__props__.__dict__["sql_endpoint_id"] = sql_endpoint_id
__props__.__dict__["sql_query_id"] = sql_query_id
return Permissions(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="accessControls")
def access_controls(self) -> pulumi.Output[Sequence['outputs.PermissionsAccessControl']]:
return pulumi.get(self, "access_controls")
@property
@pulumi.getter
def authorization(self) -> pulumi.Output[Optional[str]]:
"""
either [`tokens`](https://docs.databricks.com/administration-guide/access-control/tokens.html) or [`passwords`](https://docs.databricks.com/administration-guide/users-groups/single-sign-on/index.html#configure-password-permission).
"""
return pulumi.get(self, "authorization")
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> pulumi.Output[Optional[str]]:
"""
cluster id
"""
return pulumi.get(self, "cluster_id")
@property
@pulumi.getter(name="clusterPolicyId")
def cluster_policy_id(self) -> pulumi.Output[Optional[str]]:
"""
cluster policy id
"""
return pulumi.get(self, "cluster_policy_id")
@property
@pulumi.getter(name="directoryId")
def directory_id(self) -> pulumi.Output[Optional[str]]:
"""
directory id
"""
return pulumi.get(self, "directory_id")
@property
@pulumi.getter(name="directoryPath")
def directory_path(self) -> pulumi.Output[Optional[str]]:
"""
path of directory
"""
return pulumi.get(self, "directory_path")
@property
@pulumi.getter(name="experimentId")
def experiment_id(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "experiment_id")
@property
@pulumi.getter(name="instancePoolId")
def instance_pool_id(self) -> pulumi.Output[Optional[str]]:
"""
instance pool id
"""
return pulumi.get(self, "instance_pool_id")
@property
@pulumi.getter(name="jobId")
def job_id(self) -> pulumi.Output[Optional[str]]:
"""
job id
"""
return pulumi.get(self, "job_id")
@property
@pulumi.getter(name="notebookId")
def notebook_id(self) -> pulumi.Output[Optional[str]]:
"""
ID of notebook within workspace
"""
return pulumi.get(self, "notebook_id")
@property
@pulumi.getter(name="notebookPath")
def notebook_path(self) -> pulumi.Output[Optional[str]]:
"""
path of notebook
"""
return pulumi.get(self, "notebook_path")
@property
@pulumi.getter(name="objectType")
def object_type(self) -> pulumi.Output[str]:
"""
type of permissions.
"""
return pulumi.get(self, "object_type")
@property
@pulumi.getter(name="registeredModelId")
def registered_model_id(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "registered_model_id")
@property
@pulumi.getter(name="repoId")
def repo_id(self) -> pulumi.Output[Optional[str]]:
"""
repo id
"""
return pulumi.get(self, "repo_id")
@property
@pulumi.getter(name="repoPath")
def repo_path(self) -> pulumi.Output[Optional[str]]:
"""
path of Databricks repo directory (`/Repos/<username>/...`)
"""
return pulumi.get(self, "repo_path")
@property
@pulumi.getter(name="sqlAlertId")
def sql_alert_id(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "sql_alert_id")
@property
@pulumi.getter(name="sqlDashboardId")
def sql_dashboard_id(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "sql_dashboard_id")
@property
@pulumi.getter(name="sqlEndpointId")
def sql_endpoint_id(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "sql_endpoint_id")
@property
@pulumi.getter(name="sqlQueryId")
def sql_query_id(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "sql_query_id")
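# A minimal usage sketch for the resource generated above (not part of the SDK
# itself). It assumes the provider package is published as `pulumi_databricks`
# and that `CAN_RUN` is an accepted permission level for notebooks; the notebook
# path and group name are illustrative.
import pulumi_databricks as databricks

notebook_permissions = databricks.Permissions("notebook-permissions",
    notebook_path="/Production/ETL",
    access_controls=[databricks.PermissionsAccessControlArgs(
        group_name="data-engineers",
        permission_level="CAN_RUN",
    )])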
| 41.802832
| 279
| 0.645368
| 4,544
| 38,375
| 5.15757
| 0.042914
| 0.108892
| 0.126643
| 0.152074
| 0.925969
| 0.917648
| 0.907407
| 0.901135
| 0.891961
| 0.864397
| 0
| 0.000034
| 0.23445
| 38,375
| 917
| 280
| 41.848419
| 0.797706
| 0.165524
| 0
| 0.885434
| 1
| 0
| 0.109275
| 0.010603
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166939
| false
| 0.001637
| 0.011457
| 0.03437
| 0.278232
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 02d4e6b3a3eee626ac3250b843b87270720d699e
| 56
| py
| Python
| tests/test_init.py
| keisuke-umezawa/chutil
| df60440983c38a6dbbe4710019bcec5e83331904
| ["MIT"]
| 1
| 2019-02-16T06:20:50.000Z
| 2019-02-16T06:20:50.000Z
| tests/test_init.py
| keisuke-umezawa/chutil
| df60440983c38a6dbbe4710019bcec5e83331904
| ["MIT"]
| null | null | null
| tests/test_init.py
| keisuke-umezawa/chutil
| df60440983c38a6dbbe4710019bcec5e83331904
| ["MIT"]
| null | null | null |
import chutil as module
def test_versions():
pass
| 9.333333
| 23
| 0.714286
| 8
| 56
| 4.875
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.232143
| 56
| 5
| 24
| 11.2
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 7
| b824108791760c3044be86fca8557a92a30f2d41
| 27,400
| py
| Python
| gsf/function_class.py
| mtakahiro/gsf
| c09c5d32a45b0277c469d2d3cb2f8c11f1fc0278
| ["MIT"]
| 9
| 2019-08-23T19:00:54.000Z
| 2022-02-23T17:57:41.000Z
| gsf/function_class.py
| mtakahiro/gsf
| c09c5d32a45b0277c469d2d3cb2f8c11f1fc0278
| ["MIT"]
| 17
| 2020-05-22T17:41:15.000Z
| 2022-03-20T03:32:48.000Z
| gsf/function_class.py
| mtakahiro/gsf
| c09c5d32a45b0277c469d2d3cb2f8c11f1fc0278
| ["MIT"]
| 1
| 2020-02-01T22:55:37.000Z
| 2020-02-01T22:55:37.000Z
|
import numpy as np
import sys
import scipy.interpolate as interpolate
import asdf
from .function import *
from .basic_func import Basic
class Func:
'''
The list of (possible) `Func` attributes is given below:
Attributes
----------
'''
def __init__(self, MB, dust_model=0):
'''
Parameters
----------
dust_model : int
Attenuation law: 0 for Calzetti, 1 for MW, 2 for LMC, 3 for SMC, 4 for Kriek & Conroy.
'''
self.ID = MB.ID
self.ZZ = MB.Zall
self.age = MB.age
self.AA = MB.nage
self.tau0 = MB.tau0
self.MB = MB
self.dust_model = dust_model
self.DIR_TMP = MB.DIR_TMP
if MB.f_dust:
self.Temp = MB.Temp
try:
self.filts = MB.filts
self.DIR_FIL = MB.DIR_FILT
except:
pass
# Already Read or not;
self.f_af = False
self.f_af0 = False
def demo(self):
ZZ = self.ZZ
AA = self.AA
return ZZ, AA
#############################
# Load template in obs range.
#############################
def open_spec_fits(self, fall=0, orig=False):
'''
'''
ID0 = self.MB.ID
tau0= self.MB.tau0 #[0.01,0.02,0.03]
from astropy.io import fits
ZZ = self.ZZ
AA = self.AA
bfnc = self.MB.bfnc #Basic(ZZ)
# ASDF;
if fall == 0:
app = ''
hdu0 = self.MB.af['spec']
elif fall == 1:
app = 'all_'
hdu0 = self.MB.af['spec_full']
DIR_TMP = self.DIR_TMP
for pp in range(len(tau0)):
for zz in range(len(ZZ)):
Z = ZZ[zz]
NZ = bfnc.Z2NZ(Z)
if zz == 0 and pp == 0:
nr = hdu0['colnum']
xx = hdu0['wavelength']
lib = np.zeros((len(nr), 2+len(AA)*len(ZZ)*len(tau0)), dtype='float')
lib[:,0] = nr[:]
lib[:,1] = xx[:]
for aa in range(len(AA)):
coln = int(2 + aa)
if orig:
colname = 'fspec_orig_' + str(zz) + '_' + str(aa) + '_' + str(pp)
else:
colname = 'fspec_' + str(zz) + '_' + str(aa) + '_' + str(pp)
colnall = int(2 + pp*len(ZZ)*len(AA) + zz*len(AA) + aa) # 2 takes account of wavelength and AV columns.
lib[:,colnall] = hdu0[colname]
return lib
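# A hedged sketch of the flattened column layout assumed by open_spec_fits above:
# columns 0 and 1 hold the row number and wavelength, and each (tau, Z, age)
# combination occupies one flux column. The helper below is illustrative only and
# mirrors the index expression used in the loop.
def _flux_column_index(pp, zz, aa, n_age, n_Z):
    # offset of 2 skips the row-number and wavelength columns
    return 2 + pp * n_Z * n_age + zz * n_age + aa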
def open_spec_dust_fits(self, fall=0):
'''
Loads dust template in obs range.
'''
ID0 = self.MB.ID
tau0= self.MB.tau0 #[0.01,0.02,0.03]
from astropy.io import fits
ZZ = self.ZZ
AA = self.AA
bfnc = self.MB.bfnc #Basic(ZZ)
self.MB.af = asdf.open(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf')
self.MB.af0 = asdf.open(self.DIR_TMP + 'spec_all.asdf')
if fall == 0:
app = ''
hdu0 = self.MB.af['spec_dust']
elif fall == 1:
app = 'all_'
hdu0 = self.MB.af['spec_dust_full']
DIR_TMP = self.DIR_TMP
nr = hdu0['colnum']
xx = hdu0['wavelength']
lib = np.zeros((len(nr), 2+len(self.Temp)), dtype='float')
lib[:,0] = nr[:]
lib[:,1] = xx[:]
for aa in range(len(self.Temp)):
coln = int(2 + aa)
colname = 'fspec_' + str(aa)
colnall = int(2 + aa) # 2 takes account of wavelength and AV columns.
lib[:,colnall] = hdu0[colname]
if fall==1 and False:
import matplotlib.pyplot as plt
plt.close()
plt.plot(lib[:,1],lib[:,coln],linestyle='-')
plt.show()
return lib
def open_spec_fits_dir(self, nage, nz, kk, Av00, zgal, A00):
'''
Loads a template in the observed range, but for a non-standard (weird) template.
'''
from astropy.io import fits
tau0= self.tau0 #[0.01,0.02,0.03]
ZZ = self.ZZ
AA = self.AA
bfnc = self.MB.bfnc #Basic(ZZ)
self.MB.af = asdf.open(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf')
self.MB.af0 = asdf.open(self.DIR_TMP + 'spec_all.asdf')
app = 'all'
hdu0 = self.MB.af['spec_full']
DIR_TMP = self.DIR_TMP #'./templates/'
pp = 0
zz = nz
# Luminosity
mshdu = self.MB.af0['ML']
Ls = mshdu['Ls_%d'%nz]
xx = hdu0['wavelength'] # at RF;
nr = np.arange(0,len(xx),1) #hdu0.data['colnum']
lib = np.zeros((len(nr), 2+1), dtype='float')
lib[:,0] = nr[:]
lib[:,1] = xx[:]
aa = nage
coln = int(2 + aa)
colname = 'fspec_' + str(zz) + '_' + str(aa) + '_' + str(pp)
yy0 = hdu0[colname]/Ls[aa]
yy = flamtonu(xx, yy0)
lib[:,2] = yy[:]
if self.dust_model == 0: # Calzetti
yyd, xxd, nrd = dust_calz(xx, yy, Av00, nr)
elif self.dust_model == 1: # MW
yyd, xxd, nrd = dust_mw(xx, yy, Av00, nr)
elif self.dust_model == 2: # LMC
yyd, xxd, nrd = dust_gen(xx, yy, Av00, nr, Rv=4.05, gamma=-0.06, Eb=2.8)
elif self.dust_model == 3: # SMC
yyd, xxd, nrd = dust_gen(xx, yy, Av00, nr, Rv=4.05, gamma=-0.42, Eb=0.0)
elif self.dust_model == 4: # Kriek&Conroy with gamma=-0.2
yyd, xxd, nrd = dust_kc(xx, yy, Av00, nr, Rv=4.05, gamma=-0.2)
else:
print('No entry. Dust model is set to Calzetti')
yyd, xxd, nrd = dust_calz(xx, yy, Av00, nr)
xxd *= (1.+zgal)
nrd_yyd = np.zeros((len(nrd),3), dtype='float')
nrd_yyd[:,0] = nrd[:]
nrd_yyd[:,1] = yyd[:]
nrd_yyd[:,2] = xxd[:]
b = nrd_yyd
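# The lexsort below effectively orders the (index, flux, wavelength) rows by the
# template index in column 0 (np.lexsort uses its last key as the primary key),
# with descending flux as a tie-breaker; for unique indices this is equivalent to
# an argsort on column 0.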
nrd_yyd_sort = b[np.lexsort(([-1,1]*b[:,[1,0]]).T)]
yyd_sort = nrd_yyd_sort[:,1]
xxd_sort = nrd_yyd_sort[:,2]
return A00 * yyd_sort, xxd_sort
def get_template(self, lib, Amp=1.0, T=1.0, Av=0.0, Z=0.0, zgal=1.0, f_bb=False):
'''
Gets an element template given a set of parameters.
Not necessarily the most efficient way, but easy to use.
Parameters:
-----------
lib : dict
library dictionary.
Amp : float
Amplitude of the target template. Note that each template has Lbol = 1e10Lsun.
T : float
Age, in Gyr.
Av : float
Dust attenuation, in mag.
Z : float
Metallicity, in log(Z/Zsun).
zgal : float
Redshift.
f_bb : bool
If True, also calculate broadband (bb) photometry for the requested spectrum.
Returns
-------
flux : float array. Flux in Fnu.
wavelength : float array. Wavelength in AA.
lcen, lflux : filter central wavelengths and fluxes, returned in addition if f_bb==True.
'''
bfnc = self.MB.bfnc
DIR_TMP = self.MB.DIR_TMP
NZ = bfnc.Z2NZ(Z)
pp0 = np.random.uniform(low=0, high=len(self.tau0), size=(1,))
pp = int(pp0[0])
if pp>=len(self.tau0):
pp += -1
nmodel = np.argmin(np.abs(T-self.age[:]))
if T - self.age[nmodel] != 0:
print('T=%.2f is not found in age library. T=%.2f is used.'%(T,self.age[nmodel]))
coln= int(2 + pp*len(self.ZZ)*len(self.AA) + NZ*len(self.AA) + nmodel)
nr = lib[:, 0]
xx = lib[:, 1] # This is OBSERVED wavelength range at z=zgal
yy = lib[:, coln]
if self.dust_model == 0:
yyd, xxd, nrd = dust_calz(xx/(1.+zgal), yy, Av, nr)
elif self.dust_model == 1:
yyd, xxd, nrd = dust_mw(xx/(1.+zgal), yy, Av, nr)
elif self.dust_model == 2: # LMC
yyd, xxd, nrd = dust_gen(xx/(1.+zgal), yy, Av, nr, Rv=4.05, gamma=-0.06, Eb=2.8)
elif self.dust_model == 3: # SMC
yyd, xxd, nrd = dust_gen(xx/(1.+zgal), yy, Av, nr, Rv=4.05, gamma=-0.42, Eb=0.0)
elif self.dust_model == 4: # Kriek&Conroy with gamma=-0.2
yyd, xxd, nrd = dust_kc(xx/(1.+zgal), yy, Av, nr, Rv=4.05, gamma=-0.2)
else:
yyd, xxd, nrd = dust_calz(xx/(1.+zgal), yy, Av, nr)
xxd *= (1.+zgal)
nrd_yyd = np.zeros((len(nrd),3), dtype='float')
nrd_yyd[:,0] = nrd[:]
nrd_yyd[:,1] = yyd[:]
nrd_yyd[:,2] = xxd[:]
b = nrd_yyd
nrd_yyd_sort = b[np.lexsort(([-1,1]*b[:,[1,0]]).T)]
yyd_sort = nrd_yyd_sort[:,1]
xxd_sort = nrd_yyd_sort[:,2]
if f_bb:
#fil_cen, fil_flux = filconv(self.filts, xxd_sort, Amp * yyd_sort, self.DIR_FIL)
fil_cen, fil_flux = filconv_fast(self.MB, xxd_sort, Amp * yyd_sort)
return Amp * yyd_sort, xxd_sort, fil_flux, fil_cen
else:
return Amp * yyd_sort, xxd_sort
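# A hedged usage sketch for get_template (parameter values are illustrative; `func`
# is a Func instance built from a prepared MB object and `lib` comes from
# open_spec_fits):
def _example_get_template(func, lib):
    # one attenuated template plus its broadband photometry
    flux, wave, bb_flux, bb_cen = func.get_template(
        lib, Amp=1.0, T=0.5, Av=0.3, Z=0.0, zgal=2.0, f_bb=True)
    return flux, wave, bb_flux, bb_cen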
def tmp03(self, A00, Av00, nmodel, Z, zgal, lib):
'''
'''
tau0= self.tau0 #[0.01,0.02,0.03]
ZZ = self.ZZ
AA = self.AA
bfnc = self.MB.bfnc #Basic(ZZ)
DIR_TMP = self.MB.DIR_TMP #'./templates/'
NZ = bfnc.Z2NZ(Z)
pp0 = np.random.uniform(low=0, high=len(tau0), size=(1,))
pp = int(pp0[0])
if pp>=len(tau0):
pp += -1
coln= int(2 + pp*len(ZZ)*len(AA) + NZ*len(AA) + nmodel)
nr = lib[:,0]
xx = lib[:,1] # This is OBSERVED wavelength range at z=zgal
yy = lib[:,coln]
if self.dust_model == 0:
yyd, xxd, nrd = dust_calz(xx/(1.+zgal), yy, Av00, nr)
elif self.dust_model == 1:
yyd, xxd, nrd = dust_mw(xx/(1.+zgal), yy, Av00, nr)
elif self.dust_model == 2: # LMC
yyd, xxd, nrd = dust_gen(xx/(1.+zgal), yy, Av00, nr, Rv=4.05, gamma=-0.06, Eb=2.8)
elif self.dust_model == 3: # SMC
yyd, xxd, nrd = dust_gen(xx/(1.+zgal), yy, Av00, nr, Rv=4.05, gamma=-0.42, Eb=0.0)
elif self.dust_model == 4: # Kriek&Conroy with gamma=-0.2
yyd, xxd, nrd = dust_kc(xx/(1.+zgal), yy, Av00, nr, Rv=4.05, gamma=-0.2)
else:
yyd, xxd, nrd = dust_calz(xx/(1.+zgal), yy, Av00, nr)
xxd *= (1.+zgal)
nrd_yyd = np.zeros((len(nrd),3), dtype='float')
nrd_yyd[:,0] = nrd[:]
nrd_yyd[:,1] = yyd[:]
nrd_yyd[:,2] = xxd[:]
b = nrd_yyd
nrd_yyd_sort = b[np.lexsort(([-1,1]*b[:,[1,0]]).T)]
yyd_sort = nrd_yyd_sort[:,1]
xxd_sort = nrd_yyd_sort[:,2]
return A00 * yyd_sort, xxd_sort
def tmp04(self, par, f_Alog=True, nprec=1, f_val=False, lib_all=False, f_nrd=False):
'''
Makes a model template with a given parameter set.
Dust attenuation is also applied.
Parameters
----------
nprec : int
Precision when redshift is refined.
'''
ZZ = self.ZZ
AA = self.AA
bfnc = self.MB.bfnc
Mtot = 0
if f_val:
par = par.params
if self.MB.fzmc == 1:
try:
zmc = par['zmc'].value
except:
zmc = self.MB.zgal
else:
zmc = self.MB.zgal
pp = 0
# AV limit;
if par['Av'] < self.MB.Avmin:
par['Av'] = self.MB.Avmin
if par['Av'] > self.MB.Avmax:
par['Av'] = self.MB.Avmax
Av00 = par['Av']
for aa in range(len(AA)):
if self.MB.ZEVOL==1 or aa == 0:
Z = par['Z'+str(aa)]
NZ = bfnc.Z2NZ(Z)
else:
pass
# Check limit;
if par['A'+str(aa)] < self.MB.Amin:
par['A'+str(aa)] = self.MB.Amin
if par['A'+str(aa)] > self.MB.Amax:
par['A'+str(aa)] = self.MB.Amax
# Z limit:
if aa == 0 or self.MB.ZEVOL == 1:
if par['Z%d'%aa] < self.MB.Zmin:
par['Z%d'%aa] = self.MB.Zmin
if par['Z%d'%aa] > self.MB.Zmax:
par['Z%d'%aa] = self.MB.Zmax
# Is A in logspace?
if f_Alog:
A00 = 10**par['A'+str(aa)]
else:
A00 = par['A'+str(aa)]
coln = int(2 + pp*len(ZZ)*len(AA) + NZ*len(AA) + aa)
sedpar = self.MB.af['ML'] # For M/L
mslist = sedpar['ML_'+str(NZ)][aa]
Mtot += 10**(par['A%d'%aa] + np.log10(mslist))
if lib_all:
if aa == 0:
nr = self.MB.lib_all[:, 0]
xx = self.MB.lib_all[:, 1] # This is OBSERVED wavelength range at z=zgal
yy = A00 * self.MB.lib_all[:, coln]
else:
yy += A00 * self.MB.lib_all[:, coln]
else:
if aa == 0:
nr = self.MB.lib[:, 0]
xx = self.MB.lib[:, 1] # This is OBSERVED wavelength range at z=zgal
yy = A00 * self.MB.lib[:, coln]
else:
yy += A00 * self.MB.lib[:, coln]
self.MB.logMtmp = np.log10(Mtot)
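# Redshift refinement: if the sampled zmc differs from zgal at the requested
# precision, shift the wavelength grid to (1+zmc) and resample the summed template
# onto it with nearest-neighbour interpolation.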
if round(zmc,nprec) != round(self.MB.zgal,nprec):
xx_s = xx / (1+self.MB.zgal) * (1+zmc)
fint = interpolate.interp1d(xx, yy, kind='nearest', fill_value="extrapolate")
yy_s = fint(xx_s)
else:
xx_s = xx
yy_s = yy
xx = xx_s
yy = yy_s
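# Apply the selected attenuation law in the rest frame (dust_model: 0 = Calzetti,
# 1 = MW, 2 = LMC, 3 = SMC, 4 = Kriek & Conroy; anything else falls back to
# Calzetti), then shift the wavelengths back to the observed frame below.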
if self.dust_model == 0:
yyd, xxd, nrd = dust_calz(xx/(1.+zmc), yy, Av00, nr)
elif self.dust_model == 1:
yyd, xxd, nrd = dust_mw(xx/(1.+zmc), yy, Av00, nr)
elif self.dust_model == 2: # LMC
yyd, xxd, nrd = dust_gen(xx/(1.+zmc), yy, Av00, nr, Rv=4.05, gamma=-0.06, Eb=2.8)
elif self.dust_model == 3: # SMC
yyd, xxd, nrd = dust_gen(xx/(1.+zmc), yy, Av00, nr, Rv=4.05, gamma=-0.42, Eb=0.0)
elif self.dust_model == 4: # Kriek&Conroy with gamma=-0.2
yyd, xxd, nrd = dust_kc(xx/(1.+zmc), yy, Av00, nr, Rv=4.05, gamma=-0.2)
else:
yyd, xxd, nrd = dust_calz(xx/(1.+zmc), yy, Av00, nr)
xxd *= (1.+zmc)
nrd_yyd = np.zeros((len(nrd),3), dtype='float')
nrd_yyd[:,0] = nrd[:]
nrd_yyd[:,1] = yyd[:]
nrd_yyd[:,2] = xxd[:]
nrd_yyd_sort = nrd_yyd[nrd_yyd[:,0].argsort()]
if not f_nrd:
return nrd_yyd_sort[:,1],nrd_yyd_sort[:,2]
else:
return nrd_yyd_sort[:,0],nrd_yyd_sort[:,1],nrd_yyd_sort[:,2]
def tmp04_dust(self, par, nprec=1):
'''
Makes a model template with a given parameter set.
Dust attenuation is also applied.
'''
tau0= self.tau0
ZZ = self.ZZ
AA = self.AA
bfnc = self.MB.bfnc
DIR_TMP = self.MB.DIR_TMP
try:
m_dust = par['MDUST']
t_dust = par['TDUST']
except: # This is an exception for initial minimizing;
m_dust = -99
t_dust = 0
nr = self.MB.lib_dust[:,0]
xx = self.MB.lib_dust[:,1] # This is OBSERVED wavelength range at z=zgal
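# Pick the dust-template column nearest the (possibly fractional) TDUST value;
# columns 0 and 1 of lib_dust hold the row number and wavelength, hence the offset of 2.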
coln= 2+int(t_dust+0.5)
yy = 10**m_dust * self.MB.lib_dust[:,coln]
if self.MB.fzmc == 1:
zmc = par.params['zmc'].value
else:
zmc = self.MB.zgal
# How much does this cost in time?
if round(zmc,nprec) != round(self.MB.zgal,nprec):
xx_s = xx / (1+self.MB.zgal) * (1+zmc)
fint = interpolate.interp1d(xx, yy, kind='nearest', fill_value="extrapolate")
yy_s = fint(xx_s)
else:
xx_s = xx
yy_s = yy
return yy_s, xx_s
class Func_tau:
'''
'''
def __init__(self, MB, dust_model=0):
'''
Parameters:
-----------
dust_model : int
Attenuation law: 0 for Calzetti, 1 for MW, 2 for LMC, 3 for SMC, 4 for Kriek & Conroy.
'''
self.MB = MB
self.ID = MB.ID
self.ZZ = MB.Zall
self.AA = MB.nage
self.tau = MB.tau
self.dust_model = dust_model
self.DIR_TMP = MB.DIR_TMP
if MB.f_dust:
self.Temp = MB.Temp
try:
self.filts = MB.filts
self.DIR_FIL = MB.DIR_FILT
except:
pass
# Already Read or not;
self.f_af = False
self.f_af0 = False
def demo(self):
ZZ = self.ZZ
AA = self.AA
return ZZ, AA
def open_spec_fits(self, fall=0, orig=False):
'''
Loads template in obs range.
'''
ID0 = self.MB.ID
from astropy.io import fits
ZZ = self.ZZ
AA = self.AA
bfnc = self.MB.bfnc
# ASDF;
if fall == 0:
app = ''
hdu0 = self.MB.af['spec']
elif fall == 1:
app = 'all_'
hdu0 = self.MB.af['spec_full']
DIR_TMP = self.DIR_TMP
NZ = len(ZZ)
NT = self.MB.ntau
NA = self.MB.nage
for zz,Z in enumerate(ZZ):
for tt,TT in enumerate(self.MB.tau):
for ss,TA in enumerate(self.MB.ageparam):
if zz == 0 and tt == 0 and ss == 0:
nr = hdu0['colnum']
xx = hdu0['wavelength']
coln = int(2 + NZ * NT * NA) # + self.MB.ntau * self.MB.nage + NA)
lib = np.zeros((len(nr), coln), dtype='float')
lib[:,0] = nr[:]
lib[:,1] = xx[:]
if orig:
colname = 'fspec_orig_' + str(zz) + '_' + str(tt) + '_' + str(ss)
else:
colname = 'fspec_' + str(zz) + '_' + str(tt) + '_' + str(ss)
colnall = int(2 + zz * NT * NA + tt * NA + ss) # 2 takes account of wavelength and AV columns.
lib[:,colnall] = hdu0[colname]
return lib
def open_spec_dust_fits(self, fall=0):
'''
Load dust template in obs range.
'''
ID0 = self.MB.ID
tau0= self.MB.tau0 #[0.01,0.02,0.03]
from astropy.io import fits
ZZ = self.ZZ
AA = self.AA
bfnc = self.MB.bfnc #Basic(ZZ)
self.MB.af = asdf.open(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf')
self.MB.af0 = asdf.open(self.DIR_TMP + 'spec_all.asdf')
if fall == 0:
app = ''
hdu0 = self.MB.af['spec_dust']
elif fall == 1:
app = 'all_'
hdu0 = self.MB.af['spec_dust_full']
DIR_TMP = self.DIR_TMP
nr = hdu0['colnum']
xx = hdu0['wavelength']
lib = np.zeros((len(nr), 2+len(self.Temp)), dtype='float')
lib[:,0] = nr[:]
lib[:,1] = xx[:]
for aa in range(len(self.Temp)):
coln = int(2 + aa)
colname = 'fspec_' + str(aa)
colnall = int(2 + aa) # 2 takes account of wavelength and AV columns.
lib[:,colnall] = hdu0[colname]
if fall==1 and False:
import matplotlib.pyplot as plt
plt.close()
plt.plot(lib[:,1],lib[:,coln],linestyle='-')
plt.show()
return lib
def open_spec_fits_dir(self, nage, nz, kk, Av00, zgal, A00):
'''
Loads a template in the observed range, but for a non-standard (weird) template.
'''
from astropy.io import fits
tau0= self.tau0 #[0.01,0.02,0.03]
ZZ = self.ZZ
AA = self.AA
bfnc = self.MB.bfnc #Basic(ZZ)
self.MB.af = asdf.open(self.DIR_TMP + 'spec_all_' + self.ID + '.asdf')
self.MB.af0 = asdf.open(self.DIR_TMP + 'spec_all.asdf')
app = 'all'
hdu0 = self.MB.af['spec_full']
DIR_TMP = self.DIR_TMP #'./templates/'
pp = 0
zz = nz
# Luminosity
mshdu = self.MB.af0['ML']
Ls = mshdu['Ls_%d'%nz]
xx = hdu0['wavelength'] # at RF;
nr = np.arange(0,len(xx),1) #hdu0.data['colnum']
lib = np.zeros((len(nr), 2+1), dtype='float')
lib[:,0] = nr[:]
lib[:,1] = xx[:]
aa = nage
coln = int(2 + aa)
colname = 'fspec_' + str(zz) + '_' + str(aa) + '_' + str(pp)
yy0 = hdu0[colname]/Ls[aa]
yy = flamtonu(xx, yy0)
lib[:,2] = yy[:]
if self.dust_model == 0: # Calzetti
yyd, xxd, nrd = dust_calz(xx, yy, Av00, nr)
elif self.dust_model == 1: # MW
yyd, xxd, nrd = dust_mw(xx, yy, Av00, nr)
elif self.dust_model == 2: # LMC
yyd, xxd, nrd = dust_gen(xx, yy, Av00, nr, Rv=4.05, gamma=-0.06, Eb=2.8)
elif self.dust_model == 3: # SMC
yyd, xxd, nrd = dust_gen(xx, yy, Av00, nr, Rv=4.05, gamma=-0.42, Eb=0.0)
elif self.dust_model == 4: # Kriek&Conroy with gamma=-0.2
yyd, xxd, nrd = dust_kc(xx, yy, Av00, nr, Rv=4.05, gamma=-0.2)
else:
print('No entry. Dust model is set to Calzetti')
yyd, xxd, nrd = dust_calz(xx, yy, Av00, nr)
xxd *= (1.+zgal)
nrd_yyd = np.zeros((len(nrd),3), dtype='float')
nrd_yyd[:,0] = nrd[:]
nrd_yyd[:,1] = yyd[:]
nrd_yyd[:,2] = xxd[:]
b = nrd_yyd
nrd_yyd_sort = b[np.lexsort(([-1,1]*b[:,[1,0]]).T)]
yyd_sort = nrd_yyd_sort[:,1]
xxd_sort = nrd_yyd_sort[:,2]
return A00 * yyd_sort, xxd_sort
def tmp04(self, par, f_Alog=True, nprec=1, f_val=False, check_bound=False, lib_all=False, f_nrd=False):
'''
Makes a model template with a given parameter set.
Dust attenuation is also applied.
Parameters:
-----------
nprec : int
Precision when redshift is refined.
'''
ZZ = self.ZZ
AA = self.AA
bfnc = self.MB.bfnc
Mtot = 0
pp = 0
if f_val:
par = par.params
if self.MB.fzmc == 1:
try:
zmc = par['zmc'].value
except:
zmc = self.MB.zgal
else:
zmc = self.MB.zgal
if check_bound:
# AV limit;
if par['Av'] < self.MB.Avmin:
par['Av'] = self.MB.Avmin
if par['Av'] > self.MB.Avmax:
par['Av'] = self.MB.Avmax
Av00 = par['Av']
for aa in range(self.MB.npeak):
if self.MB.ZEVOL==1 or aa == 0:
if check_bound:
# Z limit:
if par['Z%d'%aa] < self.MB.Zmin:
par['Z%d'%aa] = self.MB.Zmin
if par['Z%d'%aa] > self.MB.Zmax:
par['Z%d'%aa] = self.MB.Zmax
Z = par['Z%d'%aa]
else:
pass
if check_bound:
# A
if par['A'+str(aa)] < self.MB.Amin:
par['A'+str(aa)] = self.MB.Amin
if par['A'+str(aa)] > self.MB.Amax:
par['A'+str(aa)] = self.MB.Amax
if par['TAU'+str(aa)] < self.MB.taumin:
par['TAU'+str(aa)] = self.MB.taumin
if par['TAU'+str(aa)] > self.MB.taumax:
par['TAU'+str(aa)] = self.MB.taumax
if par['AGE'+str(aa)] < self.MB.agemin:
par['AGE'+str(aa)] = self.MB.agemin
if par['AGE'+str(aa)] > self.MB.agemax:
par['AGE'+str(aa)] = self.MB.agemax
# Is A in logspace?
if f_Alog:
A00 = 10**par['A'+str(aa)]
else:
A00 = par['A'+str(aa)]
tau,age = par['TAU%d'%aa],par['AGE%d'%aa]
NZ, NT, NA = bfnc.Z2NZ(Z,tau,age)
coln = int(2 + NZ*self.MB.ntau*self.MB.nage + NT*self.MB.nage + NA)
mslist = self.MB.af['ML']['ML_'+str(NZ)+'_'+str(NT)][NA]
Mtot += 10**(par['A%d'%aa] + np.log10(mslist))
if lib_all:
if aa == 0:
nr = self.MB.lib_all[:, 0]
xx = self.MB.lib_all[:, 1] # This is OBSERVED wavelength range at z=zgal
yy = A00 * self.MB.lib_all[:, coln]
else:
yy += A00 * self.MB.lib_all[:, coln]
else:
if aa == 0:
nr = self.MB.lib[:, 0]
xx = self.MB.lib[:, 1] # This is OBSERVED wavelength range at z=zgal
yy = A00 * self.MB.lib[:, coln]
else:
yy += A00 * self.MB.lib[:, coln]
# Keep logM
self.MB.logMtmp = np.log10(Mtot)
# Redshift refinement;
if round(zmc,nprec) != round(self.MB.zgal,nprec): # Not sure how much this costs in time.
xx_s = xx / (1+self.MB.zgal) * (1+zmc)
fint = interpolate.interp1d(xx, yy, kind='nearest', fill_value="extrapolate")
yy_s = fint(xx_s)
else:
xx_s = xx
yy_s = yy
xx = xx_s
yy = yy_s
if self.dust_model == 0:
yyd, xxd, nrd = dust_calz(xx/(1.+zmc), yy, Av00, nr)
elif self.dust_model == 1:
yyd, xxd, nrd = dust_mw(xx/(1.+zmc), yy, Av00, nr)
elif self.dust_model == 2: # LMC
yyd, xxd, nrd = dust_gen(xx/(1.+zmc), yy, Av00, nr, Rv=4.05, gamma=-0.06, Eb=2.8)
elif self.dust_model == 3: # SMC
yyd, xxd, nrd = dust_gen(xx/(1.+zmc), yy, Av00, nr, Rv=4.05, gamma=-0.42, Eb=0.0)
elif self.dust_model == 4: # Kriek&Conroy with gamma=-0.2
yyd, xxd, nrd = dust_kc(xx/(1.+zmc), yy, Av00, nr, Rv=4.05, gamma=-0.2)
else:
yyd, xxd, nrd = dust_calz(xx/(1.+zmc), yy, Av00, nr)
xxd *= (1.+zmc)
if self.dust_model == 0:
if not f_nrd:
return yyd,xxd
else:
return nrd,yyd,xxd
else:
nrd_yyd = np.zeros((len(nrd),3), dtype='float')
nrd_yyd[:,0] = nrd[:]
nrd_yyd[:,1] = yyd[:]
nrd_yyd[:,2] = xxd[:]
nrd_yyd_sort = nrd_yyd[nrd_yyd[:,0].argsort()]
if not f_nrd:
return nrd_yyd_sort[:,1],nrd_yyd_sort[:,2]
else:
return nrd_yyd_sort[:,0],nrd_yyd_sort[:,1],nrd_yyd_sort[:,2]
def tmp04_dust(self, par, nprec=1):
'''
Makes a model template with a given parameter set.
Dust attenuation is also applied.
'''
bfnc = self.MB.bfnc #Basic(ZZ)
DIR_TMP = self.MB.DIR_TMP #'./templates/'
try:
m_dust = par['MDUST']
t_dust = par['TDUST']
except: # This is an exception for initial minimizing;
m_dust = -99
t_dust = 0
nr = self.MB.lib_dust[:,0]
xx = self.MB.lib_dust[:,1] # This is OBSERVED wavelength range at z=zgal
coln= 2+int(t_dust+0.5)
yy = 10**m_dust * self.MB.lib_dust[:,coln]
if self.MB.fzmc == 1:
zmc = par.params['zmc'].value
else:
zmc = self.MB.zgal
# How much does this cost in time?
if round(zmc,nprec) != round(self.MB.zgal,nprec):
xx_s = xx / (1+self.MB.zgal) * (1+zmc)
fint = interpolate.interp1d(xx, yy, kind='nearest', fill_value="extrapolate")
yy_s = fint(xx_s)
else:
xx_s = xx
yy_s = yy
return yy_s, xx_s
| 31.823461
| 123
| 0.464964
| 3,865
| 27,400
| 3.194825
| 0.077878
| 0.066569
| 0.026239
| 0.037901
| 0.871234
| 0.855037
| 0.83965
| 0.820376
| 0.806122
| 0.793732
| 0
| 0.040324
| 0.382737
| 27,400
| 861
| 124
| 31.823461
| 0.689765
| 0.111934
| 0
| 0.874172
| 0
| 0
| 0.033716
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02649
| false
| 0.006623
| 0.023179
| 0
| 0.084437
| 0.004967
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| b8ae72a8774e3f5e5b83670734de99743ac5f598
| 94
| py
| Python
| Server/programs/__init__.py
| VHirtz/CC-mastermind
| 11dc4e043ed67c86e66230812cbd86f736e6a7d1
| ["MIT"]
| null | null | null
| Server/programs/__init__.py
| VHirtz/CC-mastermind
| 11dc4e043ed67c86e66230812cbd86f736e6a7d1
| ["MIT"]
| null | null | null
| Server/programs/__init__.py
| VHirtz/CC-mastermind
| 11dc4e043ed67c86e66230812cbd86f736e6a7d1
| ["MIT"]
| null | null | null |
from . import program
from . import turtle_test
from . import antoine_test
from . import dance
| 23.5
| 26
| 0.797872
| 14
| 94
| 5.214286
| 0.5
| 0.547945
| 0.383562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159574
| 94
| 4
| 27
| 23.5
| 0.924051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| b285955d688db6c4b472e2c5faffe22749cd5bcf
| 7,081
| py
| Python
| ssh/factorcheck.py
| riquelmev/cs338
| cdbff5e25b112a9fb2e039f59c0ebf036649ffd8
| ["MIT"]
| null | null | null
| ssh/factorcheck.py
| riquelmev/cs338
| cdbff5e25b112a9fb2e039f59c0ebf036649ffd8
| ["MIT"]
| null | null | null
| ssh/factorcheck.py
| riquelmev/cs338
| cdbff5e25b112a9fb2e039f59c0ebf036649ffd8
| ["MIT"]
| null | null | null |
import numpy
import math
print(math.lcm(0x00eca08bfa42dcad582302232a80813894fd2e4b842dca21eba465619a0d464a9f864ab2e9c0be42367d63c595e81385dcb66bbf8242cddb848969f883af2fbb8c1490a3932c03d15b2d7dfb08dd2c61e05978fbfd337e70ba838574cfe443658910aef9303e968d32351339c14a3c08920a5c1a854cea5af98bd32f1098a2fc5f8a468009c7c063f48c29a688bc485f580625883b8a13ff655d34a11f927ddcfadfdc25c9e318127a83e8fb48ada3f531a5160fc9849852e2e51cba9001cc18e4,
0x00d63e8c9986e6067792268a91b4b65721256fe5ff7de459f80348b882d67a024032e38d9dc3d12943e95f97c9efe381399f16697311ad2766ab98dbe08c30fcd312754bbeb344c88fa2f8ff7ce6ac36d68e4950dfd6599270cfa9b36cec3384323efe64731a69aedee1761104f65a6f84eab6806c90af902b7a24c422cf4673986eb7b18650de51b10109de23668e471354f543b2d05386f4aa44feaf00fe0e0ca8335ba9cd0a0cd7b44233fcec489a3217eb3da1d9b51c4d8e9ba40cfd6cb7aa))
print (( (65537 * 2943845207193600139849586921660530062979514836939652252911168510314905302166532845264906113584033646531012076406573806987025047457519902435411802267739360377120761697446091031629022721340581940013244671666962132695199042194704089512690548281464483553640422003142860526990759194808923501682158662399385088877090264964084503057490757632128265341366808789218428209326618760642760356184383281196480504761667539912421070047089521150757775831975677601090160692307767419292257798639731268363386233177395498370665722400495226560396671910091288741087409721516597979322885628216630331527097105539998928620712679031068142304793554336036922257467880853151468114731275288628988864368750827488439382991282564278525342098508917887127750683566587189942598936549588448717091038482697327056078134954278878301931522106687291086778640089700384840670406150969051320700177941289226071446754539534444766951378823161600415971105082067617171855980113)
% 2247039172418436668592154415151015126222786674452760187503368863970509536315956942465946330840400804713521295730929741305714657992353620380964165912192341731136307469898957232004091102824338674617377312450939870608493589894180315797731195699072185635394040726997130798478842130796557413577261032584072916023035927031809993907276633856706151009517313622397019910955492822225070876581131226412459152580542808796183783690613859162091921205452946458684438170181390092687592585015747357730389512738725469097581172245064706069050974691027868509488068610750445862693733466299013534093773154038841250698994256296984775707305557541589235662563155223305238362859813517247589601725306580259839877045186180003746975834031900204620211932784805784617611303338578827900908401922205156339089130334248484128507875195736838993177401998121291885662897832705086377879426528514698451483880180031084401254280385901954419537599741014039443185713588 == 1))
print((32**65537) % 2247039172418436668592154415151015126222786674452760187503368863970509536315956942465946330840400804713521295730929741305714657992353620380964165912192341731136307469898957232004091102824338674617377312450939870608493589894180315797731195699072185635394040726997130798478842130796557413577261032584072916023035927031809993907276633856706151009517313622397019910955492822225070876581131226412459152580542808796183783690613859162091921205452946458684438170181390092687592585015747357730389512738725469097581172245064706069050974691027868509488068610750445862693733466299013534093773154038841250698994256296984775707305557541589235662563155223305238362859813517247589601725306580259839877045186180003746975834031900204620211932784805784617611303338578827900908401922205156339089130334248484128507875195736838993177401998121291885662897832705086377879426528514698451483880180031084401254280385901954419537599741014039443185713588)
print(2943845207193600139849586921660530062979514836939652252911168510314905302166532845264906113584033646531012076406573806987025047457519902435411802267739360377120761697446091031629022721340581940013244671666962132695199042194704089512690548281464483553640422003142860526990759194808923501682158662399385088877090264964084503057490757632128265341366808789218428209326618760642760356184383281196480504761667539912421070047089521150757775831975677601090160692307767419292257798639731268363386233177395498370665722400495226560396671910091288741087409721516597979322885628216630331527097105539998928620712679031068142304793554336036922257467880853151468114731275288628988864368750827488439382991282564278525342098508917887127750683566587189942598936549588448717091038482697327056078134954278878301931522106687291086778640089700384840670406150969051320700177941289226071446754539534444766951378823161600415971105082067617171855980113%0x00eca08bfa42dcad582302232a80813894fd2e4b842dca21eba465619a0d464a9f864ab2e9c0be42367d63c595e81385dcb66bbf8242cddb848969f883af2fbb8c1490a3932c03d15b2d7dfb08dd2c61e05978fbfd337e70ba838574cfe443658910aef9303e968d32351339c14a3c08920a5c1a854cea5af98bd32f1098a2fc5f8a468009c7c063f48c29a688bc485f580625883b8a13ff655d34a11f927ddcfadfdc25c9e318127a83e8fb48ada3f531a5160fc9849852e2e51cba9001cc18e4
== 0x283f4a6fbfad9f424d7a10972b124f986fd3cefe65776afb9493b5dd2902dab0757c0120672b3541e563f1f88467c5adfbcd29deb31426914d7a1bcdf21f314c2b374acb3e824bbab16b2b269fcfebb9e81dfee65b3ad75bb201221436240c821ab758250f9035e5e34728dcaa8eb97a758ea2e82763f92356d80dba49ebf6f71d22cea65b366b09ee492b4d38912abe6315412db7579d6a15475d5c6c634211ddbfa921c4a1948b0822b992ec0de6279287c519a696ee0a2fa40a4b7232cfcd)
print(2943845207193600139849586921660530062979514836939652252911168510314905302166532845264906113584033646531012076406573806987025047457519902435411802267739360377120761697446091031629022721340581940013244671666962132695199042194704089512690548281464483553640422003142860526990759194808923501682158662399385088877090264964084503057490757632128265341366808789218428209326618760642760356184383281196480504761667539912421070047089521150757775831975677601090160692307767419292257798639731268363386233177395498370665722400495226560396671910091288741087409721516597979322885628216630331527097105539998928620712679031068142304793554336036922257467880853151468114731275288628988864368750827488439382991282564278525342098508917887127750683566587189942598936549588448717091038482697327056078134954278878301931522106687291086778640089700384840670406150969051320700177941289226071446754539534444766951378823161600415971105082067617171855980113%
0x00d63e8c9986e6067792268a91b4b65721256fe5ff7de459f80348b882d67a024032e38d9dc3d12943e95f97c9efe381399f16697311ad2766ab98dbe08c30fcd312754bbeb344c88fa2f8ff7ce6ac36d68e4950dfd6599270cfa9b36cec3384323efe64731a69aedee1761104f65a6f84eab6806c90af902b7a24c422cf4673986eb7b18650de51b10109de23668e471354f543b2d05386f4aa44feaf00fe0e0ca8335ba9cd0a0cd7b44233fcec489a3217eb3da1d9b51c4d8e9ba40cfd6cb7aa
== 0x47d9c4577cc94a23f1ace14e0a5818927236bbe0da7ca9bba6864df2fb3101ee3be2daccad2e49739021d20b145bad2c00f1883de210bb2510a97c1c2b880652575f651eb88a79e4ca184dbebab1c8d65df3b29ecf094d366e3e9081181a12dcb309a7f07e4c312c685aab4c89be3ca64bfd16c6d2233eeb85d42cbf2bda89cbf65dbeb8b8084747607cc9b5ff9ff9b03f0ede3c6ae7885c277a6a1b90eea311959b5bc36f934e494d17e2cd9104ac49de81b332c38b9cc959e952b4548d906f)
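# A hedged, generic sketch of the standard RSA consistency relations that the
# prints above appear to verify piecewise. The names p, q, e, d, m below are
# illustrative and are not taken from the file.
import math

def rsa_consistency(p, q, e, d, m=32):
    # n is the public modulus; lam = lcm(p-1, q-1) is the Carmichael totient
    n = p * q
    lam = math.lcm(p - 1, q - 1)
    exponents_ok = (d * e) % lam == 1                  # d and e are inverses modulo lam
    roundtrip_ok = pow(pow(m, e, n), d, n) == m % n    # encrypt-then-decrypt returns m
    return exponents_ok and roundtrip_ok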
| 337.190476
| 1,320
| 0.990679
| 26
| 7,081
| 269.807692
| 0.538462
| 0.265146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.86223
| 0.00466
| 7,081
| 20
| 1,321
| 354.05
| 0.133087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.328202
| 0
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.166667
| 0.416667
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 10
| b2ae0f0ae136e69e9eedb942d08d354586e0fafa
| 4,850
| py
| Python
| HyperAPI/hdp_api/routes/nitro.py
| RomainGeffraye/HyperAPI
| 6bcd831ee48abb3a4f67f85051bc0d2a07c7aaef
| ["BSD-3-Clause"]
| null | null | null
| HyperAPI/hdp_api/routes/nitro.py
| RomainGeffraye/HyperAPI
| 6bcd831ee48abb3a4f67f85051bc0d2a07c7aaef
| ["BSD-3-Clause"]
| null | null | null
| HyperAPI/hdp_api/routes/nitro.py
| RomainGeffraye/HyperAPI
| 6bcd831ee48abb3a4f67f85051bc0d2a07c7aaef
| ["BSD-3-Clause"]
| null | null | null |
from HyperAPI.hdp_api.routes import Resource, Route
from HyperAPI.hdp_api.routes.base.version_management import available_since
class Nitro(Resource):
name = "nitro"
class _getForecasts(Route):
name = "getForecasts"
httpMethod = Route.POST
path = "/nitro/projects/{project_ID}/datasets/{dataset_ID}/forecasts"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID
}
class _getForecast(Route):
name = "getForecast"
httpMethod = Route.GET
path = "/nitro/projects/{project_ID}/datasets/{dataset_ID}/forecasts/{forecast_ID}"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'forecast_ID': Route.VALIDATOR_OBJECTID
}
class _insertForecast(Route):
name = "insertForecast"
httpMethod = Route.POST
path = "/nitro/projects/{project_ID}/datasets/{dataset_ID}/forecasts/add"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID
}
class _updateForecast(Route):
name = "updateForecast"
httpMethod = Route.POST
path = "/nitro/projects/{project_ID}/datasets/{dataset_ID}/forecasts/{forecast_ID}"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'forecast_ID': Route.VALIDATOR_OBJECTID
}
@available_since('2.0')
class _updateForecastCoef(Route):
name = "updateForecastCoef"
httpMethod = Route.POST
path = "/nitro/projects/{project_ID}/datasets/{dataset_ID}/forecasts/{forecast_ID}/tunes/updatecoef"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'forecast_ID': Route.VALIDATOR_OBJECTID
}
class _deleteForecast(Route):
name = "deleteForecast"
httpMethod = Route.POST
path = "/nitro/projects/{project_ID}/datasets/{dataset_ID}/forecasts/{forecast_ID}/delete"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'forecast_ID': Route.VALIDATOR_OBJECTID
}
class _getForecastTunes(Route):
name = "getForecastTunes"
httpMethod = Route.POST
path = "/nitro/projects/{project_ID}/datasets/{dataset_ID}/forecasts/{forecast_ID}/tunes"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'forecast_ID': Route.VALIDATOR_OBJECTID
}
class _updateForecastTunes(Route):
name = "updateForecastTunes"
httpMethod = Route.POST
path = "/nitro/projects/{project_ID}/datasets/{dataset_ID}/forecasts/{forecast_ID}/tunes/update"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'forecast_ID': Route.VALIDATOR_OBJECTID
}
class _getForecastTunesAggregateGeo(Route):
name = "getForecastTunesAggregateGeo"
httpMethod = Route.POST
path = "/nitro/projects/{project_ID}/datasets/{dataset_ID}/forecasts/{forecast_ID}/tunes/aggregate/geo"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'forecast_ID': Route.VALIDATOR_OBJECTID
}
class _getForecastTunesAggregateDepot(Route):
name = "getForecastTunesAggregateDepot"
httpMethod = Route.POST
path = "/nitro/projects/{project_ID}/datasets/{dataset_ID}/forecasts/{forecast_ID}/tunes/aggregate/depot"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'forecast_ID': Route.VALIDATOR_OBJECTID
}
class _exportForecastTunes(Route):
name = "exportForecastTunes"
httpMethod = Route.GET
path = "/nitro/projects/{project_ID}/datasets/{dataset_ID}/forecasts/{forecast_ID}/tunes/export"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'forecast_ID': Route.VALIDATOR_OBJECTID
}
@available_since('2.0')
class _exportReport(Route):
name = "exportReport"
httpMethod = Route.GET
path = "/nitro/projects/{project_ID}/datasets/{dataset_ID}/forecasts/{forecast_ID}/tunes/exportreport"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'forecast_ID': Route.VALIDATOR_OBJECTID
}
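# A minimal illustrative sketch (not part of HyperAPI itself): every Route subclass
# above pairs a `path` template with `_path_keys` validators, so a concrete URL can
# be produced by substituting one validated value per placeholder. The helper name
# below is hypothetical.
def build_path(route_cls, **keys):
    # assumes each `{placeholder}` in route_cls.path has a matching entry in `keys`
    return route_cls.path.format(**keys)

# e.g. build_path(Nitro._getForecast, project_ID="...", dataset_ID="...", forecast_ID="...")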
| 38.188976
| 113
| 0.640412
| 467
| 4,850
| 6.342612
| 0.11349
| 0.080351
| 0.18366
| 0.27549
| 0.77549
| 0.759284
| 0.759284
| 0.759284
| 0.759284
| 0.759284
| 0
| 0.001104
| 0.252784
| 4,850
| 126
| 114
| 38.492063
| 0.816225
| 0
| 0
| 0.553571
| 0
| 0
| 0.319381
| 0.214227
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| a2ac4d61989a683d4c9f7b828fb2128fcf9a33a2
| 7,934
| py
| Python
| ivy/container/gradients.py
| Aarsh2001/ivy
| 827164d7d31bd08c5287bbd1ac9ccce588b733bc
| ["Apache-2.0"]
| null | null | null
| ivy/container/gradients.py
| Aarsh2001/ivy
| 827164d7d31bd08c5287bbd1ac9ccce588b733bc
| ["Apache-2.0"]
| null | null | null
| ivy/container/gradients.py
| Aarsh2001/ivy
| 827164d7d31bd08c5287bbd1ac9ccce588b733bc
| ["Apache-2.0"]
| null | null | null |
from typing import Optional, Union, List, Dict
# local
import ivy
from ivy.container.base import ContainerBase
# noinspection PyMissingConstructor
class ContainerWithGradients(ContainerBase):
@staticmethod
def static_optimizer_update(
w,
effective_grad,
lr,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
) -> ivy.Container:
return ContainerBase.multi_map_in_static_method(
"optimizer_update",
w,
effective_grad,
lr,
inplace=inplace,
stop_gradients=stop_gradients,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)
def optimizer_update(
self: ivy.Container,
effective_grad,
lr,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
) -> ivy.Container:
return self.static_optimizer_update(
self,
effective_grad,
lr,
inplace,
stop_gradients,
key_chains,
to_apply,
prune_unapplied,
map_sequences,
)
@staticmethod
def static_gradient_descent_update(
w,
dcdw,
lr,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
) -> ivy.Container:
return ContainerBase.multi_map_in_static_method(
"gradient_descent_update",
w,
dcdw,
lr,
inplace=inplace,
stop_gradients=stop_gradients,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)
def gradient_descent_update(
self,
dcdw,
lr,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
):
return self.static_gradient_descent_update(
self,
dcdw,
lr,
inplace,
stop_gradients,
key_chains,
to_apply,
prune_unapplied,
map_sequences,
)
@staticmethod
def static_lars_update(
w,
dcdw,
lr,
decay_lambda=0,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
) -> ivy.Container:
return ContainerBase.multi_map_in_static_method(
"lars_update",
w,
dcdw,
lr,
decay_lambda=decay_lambda,
inplace=inplace,
stop_gradients=stop_gradients,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)
def lars_update(
self,
dcdw,
lr,
decay_lambda=0,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
):
return self.static_lars_update(
self,
dcdw,
lr,
decay_lambda,
inplace,
stop_gradients,
key_chains,
to_apply,
prune_unapplied,
map_sequences,
)
@staticmethod
def static_adam_update(
w,
dcdw,
lr,
mw_tm1,
vw_tm1,
step,
beta1=0.9,
beta2=0.999,
epsilon=1e-7,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
) -> ivy.Container:
return ContainerBase.multi_map_in_static_method(
"adam_update",
w,
dcdw,
lr,
mw_tm1=mw_tm1,
vw_tm1=vw_tm1,
step=step,
beta1=beta1,
beta2=beta2,
epsilon=epsilon,
inplace=inplace,
stop_gradients=stop_gradients,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)
def adam_update(
self,
dcdw,
lr,
mw_tm1,
vw_tm1,
step,
beta1=0.9,
beta2=0.999,
epsilon=1e-7,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
):
return self.static_adam_update(
self,
dcdw,
lr,
mw_tm1,
vw_tm1,
step,
beta1,
beta2,
epsilon,
inplace,
stop_gradients,
key_chains,
to_apply,
prune_unapplied,
map_sequences,
)
@staticmethod
def static_lamb_update(
w,
dcdw,
lr,
mw_tm1,
vw_tm1,
step,
beta1=0.9,
beta2=0.999,
epsilon=1e-7,
max_trust_ratio=10,
decay_lambda=0,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
) -> ivy.Container:
return ContainerBase.multi_map_in_static_method(
"lamb_update",
w,
dcdw,
lr,
mw_tm1=mw_tm1,
vw_tm1=vw_tm1,
step=step,
beta1=beta1,
beta2=beta2,
epsilon=epsilon,
max_trust_ratio=max_trust_ratio,
decay_lambda=decay_lambda,
inplace=inplace,
stop_gradients=stop_gradients,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)
def lamb_update(
self,
dcdw,
lr,
mw_tm1,
vw_tm1,
step,
beta1=0.9,
beta2=0.999,
epsilon=1e-7,
max_trust_ratio=10,
decay_lambda=0,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
):
return self.static_lamb_update(
self,
dcdw,
lr,
mw_tm1,
vw_tm1,
step,
beta1,
beta2,
epsilon,
max_trust_ratio,
decay_lambda,
inplace,
stop_gradients,
key_chains,
to_apply,
prune_unapplied,
map_sequences,
)
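# Hedged usage sketch (not part of the original module): ivy.Container mixes in
# ContainerWithGradients, so the update rules above apply leaf-wise across nested
# weight/gradient containers. Backend selection via ivy.set_backend is assumed to be
# available in this ivy version; any installed backend should work for the sketch.
import ivy

ivy.set_backend("numpy")

w = ivy.Container(a=ivy.array([1.0, 2.0]), b=ivy.array([3.0]))
dcdw = ivy.Container(a=ivy.array([0.5, 0.5]), b=ivy.array([0.1]))

# plain gradient descent, w_new = w - lr * dcdw, applied to every leaf of the container
w_new = w.gradient_descent_update(dcdw, lr=0.1)
print(w_new)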
| 25.511254 | 70 | 0.515755 | 792 | 7,934 | 4.886364 | 0.088384 | 0.083979 | 0.04832 | 0.062016 | 0.92739 | 0.917829 | 0.917829 | 0.856848 | 0.856848 | 0.856848 | 0 | 0.017172 | 0.40547 | 7,934 | 310 | 71 | 25.593548 | 0.803265 | 0.004916 | 0 | 0.870748 | 0 | 0 | 0.009123 | 0.002914 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034014 | false | 0 | 0.010204 | 0.034014 | 0.081633 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0c5539475c0da1f3dfc53cbf5dc335c43077d9cf | 2,835 | py | Python | services/backend/expiring_links/tests/test_expiring_link_generator_serializer.py | patpio/drf_images_api | ef689bac10ce8b9d2f03d6b647fa4bbd70b02f1c | ["Beerware"] | 1 | 2022-02-27T16:34:46.000Z | 2022-02-27T16:34:46.000Z | services/backend/expiring_links/tests/test_expiring_link_generator_serializer.py | patpio/drf_images_api | ef689bac10ce8b9d2f03d6b647fa4bbd70b02f1c | ["Beerware"] | null | null | null | services/backend/expiring_links/tests/test_expiring_link_generator_serializer.py | patpio/drf_images_api | ef689bac10ce8b9d2f03d6b647fa4bbd70b02f1c | ["Beerware"] | null | null | null |
import pytest
from expiring_links.serializers import ExpiringLinkGeneratorSerializer
@pytest.mark.serializers
def test_fields(db, create_test_expiring_link_serializer_data):
assert list(create_test_expiring_link_serializer_data.keys()) == ['image_id', 'expiration_time']
@pytest.mark.serializers
def test_valid_serializer(db, create_test_expiring_link_serializer_data, create_test_image, create_test_user,
remove_test_data):
serializer = ExpiringLinkGeneratorSerializer(data=create_test_expiring_link_serializer_data,
context={'user': create_test_user})
assert serializer.is_valid()
@pytest.mark.serializers
def test_user_without_permission(db, create_test_expiring_link_serializer_data, create_test_image, create_test_user,
remove_test_data):
create_test_user.tier.expired_link_flag = False
serializer = ExpiringLinkGeneratorSerializer(data=create_test_expiring_link_serializer_data,
context={'user': create_test_user})
assert not serializer.is_valid()
assert set(serializer.errors) == {'non_field_errors'}
@pytest.mark.serializers
def test_wrong_image_id(db, create_test_expiring_link_serializer_data, create_test_image, create_test_user,
remove_test_data):
create_test_expiring_link_serializer_data['image_id'] = create_test_image.pk + 1
serializer = ExpiringLinkGeneratorSerializer(data=create_test_expiring_link_serializer_data,
context={'user': create_test_user})
assert not serializer.is_valid()
assert set(serializer.errors) == {'image_id'}
@pytest.mark.serializers
def test_too_short_expiration_time(db, create_test_expiring_link_serializer_data, create_test_image, create_test_user,
remove_test_data):
create_test_expiring_link_serializer_data['expiration_time'] = 200
serializer = ExpiringLinkGeneratorSerializer(data=create_test_expiring_link_serializer_data,
context={'user': create_test_user})
assert not serializer.is_valid()
assert set(serializer.errors) == {'expiration_time'}
@pytest.mark.serializers
def test_too_long_expiration_time(db, create_test_expiring_link_serializer_data, create_test_image, create_test_user,
remove_test_data):
create_test_expiring_link_serializer_data['expiration_time'] = 40000
serializer = ExpiringLinkGeneratorSerializer(data=create_test_expiring_link_serializer_data,
context={'user': create_test_user})
assert not serializer.is_valid()
assert set(serializer.errors) == {'expiration_time'}
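# Hedged sketch (not part of the original test module): a serializer shaped like the one
# these tests exercise. The field names, the expiration_time bounds, and the tier flag
# check are inferred from the assertions above; the class name, the 300-30000 second
# range, and the images.models.Image import are hypothetical, and the real
# ExpiringLinkGeneratorSerializer in the repository may differ in details.
from rest_framework import serializers

class HypotheticalExpiringLinkGeneratorSerializer(serializers.Serializer):
    image_id = serializers.IntegerField()
    expiration_time = serializers.IntegerField(min_value=300, max_value=30000)

    def validate_image_id(self, value):
        # the tests expect an 'image_id' error when the referenced image does not exist
        from images.models import Image  # hypothetical app/model name
        if not Image.objects.filter(pk=value).exists():
            raise serializers.ValidationError("Image does not exist.")
        return value

    def validate(self, attrs):
        # the tests expect a non_field_errors entry when the user's tier lacks the flag
        user = self.context["user"]
        if not user.tier.expired_link_flag:
            raise serializers.ValidationError("User tier does not allow expiring links.")
        return attrs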
| 42.954545 | 118 | 0.71358 | 313 | 2,835 | 5.971246 | 0.15016 | 0.171215 | 0.144462 | 0.176565 | 0.860888 | 0.815944 | 0.778491 | 0.720706 | 0.720706 | 0.720706 | 0 | 0.004052 | 0.216578 | 2,835 | 65 | 119 | 43.615385 | 0.837461 | 0 | 0 | 0.627907 | 0 | 0 | 0.047619 | 0 | 0 | 0 | 0 | 0 | 0.232558 | 1 | 0.139535 | false | 0 | 0.046512 | 0 | 0.186047 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0c553d8f4165e63fa177620f1fa3f79bb1b9cb45 | 91,609 | py | Python | com/vmware/nsx/trust_management_client.py | adammillerio/vsphere-automation-sdk-python | c07e1be98615201139b26c28db3aa584c4254b66 | ["MIT"] | null | null | null | com/vmware/nsx/trust_management_client.py | adammillerio/vsphere-automation-sdk-python | c07e1be98615201139b26c28db3aa584c4254b66 | ["MIT"] | null | null | null | com/vmware/nsx/trust_management_client.py | adammillerio/vsphere-automation-sdk-python | c07e1be98615201139b26c28db3aa584c4254b66 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2020 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.nsx.trust_management.
#---------------------------------------------------------------------------
"""
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class Certificates(VapiInterface):
"""
"""
LIST_TYPE_CERTIFICATE = "cluster_api_certificate"
"""
Possible value for ``type`` of method :func:`Certificates.list`.
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.trust_management.certificates'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _CertificatesStub)
self._VAPI_OPERATION_IDS = {}
def delete(self,
cert_id,
):
"""
Removes the specified certificate. The private key associated with the
certificate is also deleted.
:type cert_id: :class:`str`
:param cert_id: ID of certificate to delete (required)
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('delete',
{
'cert_id': cert_id,
})
def get(self,
cert_id,
details=None,
):
"""
Returns information for the specified certificate ID, including the
certificate's UUID; resource_type (for example,
certificate_self_signed, certificate_ca, or certificate_signed);
pem_encoded data; and history of the certificate (who created or
modified it and when). For additional information, include the
?details=true modifier at the end of the request URI.
:type cert_id: :class:`str`
:param cert_id: ID of certificate to read (required)
:type details: :class:`bool` or ``None``
:param details: whether to expand the pem data and show all its details (optional,
default to false)
:rtype: :class:`com.vmware.nsx.model_client.Certificate`
:return: com.vmware.nsx.model.Certificate
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get',
{
'cert_id': cert_id,
'details': details,
})
def importcertificate(self,
trust_object_data,
):
"""
Adds a new private-public certificate or a chain of certificates (CAs)
and, optionally, a private key that can be applied to one of the
user-facing components (appliance management or edge). The certificate
and the key should be stored in PEM format. If no private key is
provided, the certificate is used as a client certificate in the trust
store.
:type trust_object_data: :class:`com.vmware.nsx.model_client.TrustObjectData`
:param trust_object_data: (required)
:rtype: :class:`com.vmware.nsx.model_client.CertificateList`
:return: com.vmware.nsx.model.CertificateList
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('importcertificate',
{
'trust_object_data': trust_object_data,
})
def list(self,
cursor=None,
details=None,
included_fields=None,
page_size=None,
sort_ascending=None,
sort_by=None,
type=None,
):
"""
Returns all certificate information viewable by the user, including
each certificate's UUID; resource_type (for example,
certificate_self_signed, certificate_ca, or certificate_signed);
pem_encoded data; and history of the certificate (who created or
modified it and when). For additional information, include the
?details=true modifier at the end of the request URI.
:type cursor: :class:`str` or ``None``
:param cursor: Opaque cursor to be used for getting next page of records (supplied
by current result page) (optional)
:type details: :class:`bool` or ``None``
:param details: whether to expand the pem data and show all its details (optional,
default to false)
:type included_fields: :class:`str` or ``None``
:param included_fields: Comma separated list of fields that should be included in query
result (optional)
:type page_size: :class:`long` or ``None``
:param page_size: Maximum number of results to return in this page (server may return
fewer) (optional, default to 1000)
:type sort_ascending: :class:`bool` or ``None``
:param sort_ascending: (optional)
:type sort_by: :class:`str` or ``None``
:param sort_by: Field by which records are sorted (optional)
:type type: :class:`str` or ``None``
:param type: Type of certificate to return (optional)
:rtype: :class:`com.vmware.nsx.model_client.CertificateList`
:return: com.vmware.nsx.model.CertificateList
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('list',
{
'cursor': cursor,
'details': details,
'included_fields': included_fields,
'page_size': page_size,
'sort_ascending': sort_ascending,
'sort_by': sort_by,
'type': type,
})
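    def _example_list_cluster_certificates(config):
        """Hedged usage sketch (not part of the generated stub).

        `config` is assumed to be a vmware.vapi.bindings.stub.StubConfiguration that is
        already wired to an NSX manager, as described in the __init__ docstrings above.
        The `results` and `id` attributes on the returned bindings are assumptions about
        com.vmware.nsx.model_client; only the Certificates methods themselves come from
        this module.
        """
        certificates = Certificates(config)
        # restrict the listing to cluster API certificates via the constant on the class
        cert_list = certificates.list(type=Certificates.LIST_TYPE_CERTIFICATE)
        for cert in cert_list.results:
            # re-fetch each certificate with expanded PEM details
            certificates.get(cert_id=cert.id, details=True)
        return cert_list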
class CrlDistributionPoints(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.trust_management.crl_distribution_points'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _CrlDistributionPointsStub)
self._VAPI_OPERATION_IDS = {}
def create(self,
crl_distribution_point,
):
"""
Create an entity that will represent a Crl Distribution Point
:type crl_distribution_point: :class:`com.vmware.nsx.model_client.CrlDistributionPoint`
:param crl_distribution_point: (required)
:rtype: :class:`com.vmware.nsx.model_client.CrlDistributionPoint`
:return: com.vmware.nsx.model.CrlDistributionPoint
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('create',
{
'crl_distribution_point': crl_distribution_point,
})
def delete(self,
crl_distribution_point_id,
):
"""
Delete a CrlDistributionPoint. It does not delete the actual CRL.
:type crl_distribution_point_id: :class:`str`
:param crl_distribution_point_id: Unique id of the CrlDistributionPoint to delete (required)
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('delete',
{
'crl_distribution_point_id': crl_distribution_point_id,
})
def get(self,
crl_distribution_point_id,
):
"""
:type crl_distribution_point_id: :class:`str`
:param crl_distribution_point_id: (required)
:rtype: :class:`com.vmware.nsx.model_client.CrlDistributionPoint`
:return: com.vmware.nsx.model.CrlDistributionPoint
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get',
{
'crl_distribution_point_id': crl_distribution_point_id,
})
def list(self,
cursor=None,
included_fields=None,
page_size=None,
sort_ascending=None,
sort_by=None,
):
"""
Return the list of CrlDistributionPoints
:type cursor: :class:`str` or ``None``
:param cursor: Opaque cursor to be used for getting next page of records (supplied
by current result page) (optional)
:type included_fields: :class:`str` or ``None``
:param included_fields: Comma separated list of fields that should be included in query
result (optional)
:type page_size: :class:`long` or ``None``
:param page_size: Maximum number of results to return in this page (server may return
fewer) (optional, default to 1000)
:type sort_ascending: :class:`bool` or ``None``
:param sort_ascending: (optional)
:type sort_by: :class:`str` or ``None``
:param sort_by: Field by which records are sorted (optional)
:rtype: :class:`com.vmware.nsx.model_client.CrlDistributionPointList`
:return: com.vmware.nsx.model.CrlDistributionPointList
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('list',
{
'cursor': cursor,
'included_fields': included_fields,
'page_size': page_size,
'sort_ascending': sort_ascending,
'sort_by': sort_by,
})
def update(self,
crl_distribution_point_id,
crl_distribution_point,
):
"""
:type crl_distribution_point_id: :class:`str`
:param crl_distribution_point_id: (required)
:type crl_distribution_point: :class:`com.vmware.nsx.model_client.CrlDistributionPoint`
:param crl_distribution_point: (required)
:rtype: :class:`com.vmware.nsx.model_client.CrlDistributionPoint`
:return: com.vmware.nsx.model.CrlDistributionPoint
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('update',
{
'crl_distribution_point_id': crl_distribution_point_id,
'crl_distribution_point': crl_distribution_point,
})
class Crls(VapiInterface):
"""
"""
LIST_TYPE_CERTIFICATE = "cluster_api_certificate"
"""
Possible value for ``type`` of method :func:`Crls.list`.
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.trust_management.crls'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _CrlsStub)
self._VAPI_OPERATION_IDS = {}
def delete(self,
crl_id,
):
"""
Deletes an existing CRL.
:type crl_id: :class:`str`
:param crl_id: ID of CRL to delete (required)
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('delete',
{
'crl_id': crl_id,
})
def get(self,
crl_id,
details=None,
):
"""
Returns information about the specified CRL. For additional
information, include the ?details=true modifier at the end of the
request URI.
:type crl_id: :class:`str`
:param crl_id: ID of CRL to read (required)
:type details: :class:`bool` or ``None``
:param details: whether to expand the pem data and show all its details (optional,
default to false)
:rtype: :class:`com.vmware.nsx.model_client.Crl`
:return: com.vmware.nsx.model.Crl
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get',
{
'crl_id': crl_id,
'details': details,
})
def importcrl(self,
crl_object_data,
):
"""
Adds a new certificate revocation list (CRL). The CRL is used to verify
the client certificate status against the revocation lists published by
the CA. For this reason, the administrator needs to add the CRL to the
certificate repository as well.
:type crl_object_data: :class:`com.vmware.nsx.model_client.CrlObjectData`
:param crl_object_data: (required)
:rtype: :class:`com.vmware.nsx.model_client.CrlList`
:return: com.vmware.nsx.model.CrlList
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('importcrl',
{
'crl_object_data': crl_object_data,
})
def list(self,
cursor=None,
details=None,
included_fields=None,
page_size=None,
sort_ascending=None,
sort_by=None,
type=None,
):
"""
Returns information about all CRLs. For additional information, include
the ?details=true modifier at the end of the request URI.
:type cursor: :class:`str` or ``None``
:param cursor: Opaque cursor to be used for getting next page of records (supplied
by current result page) (optional)
:type details: :class:`bool` or ``None``
:param details: whether to expand the pem data and show all its details (optional,
default to false)
:type included_fields: :class:`str` or ``None``
:param included_fields: Comma separated list of fields that should be included in query
result (optional)
:type page_size: :class:`long` or ``None``
:param page_size: Maximum number of results to return in this page (server may return
fewer) (optional, default to 1000)
:type sort_ascending: :class:`bool` or ``None``
:param sort_ascending: (optional)
:type sort_by: :class:`str` or ``None``
:param sort_by: Field by which records are sorted (optional)
:type type: :class:`str` or ``None``
:param type: Type of certificate to return (optional)
:rtype: :class:`com.vmware.nsx.model_client.CrlList`
:return: com.vmware.nsx.model.CrlList
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('list',
{
'cursor': cursor,
'details': details,
'included_fields': included_fields,
'page_size': page_size,
'sort_ascending': sort_ascending,
'sort_by': sort_by,
'type': type,
})
def update(self,
crl_id,
crl,
):
"""
Updates an existing CRL.
:type crl_id: :class:`str`
:param crl_id: ID of CRL to update (required)
:type crl: :class:`com.vmware.nsx.model_client.Crl`
:param crl: (required)
:rtype: :class:`com.vmware.nsx.model_client.Crl`
:return: com.vmware.nsx.model.Crl
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('update',
{
'crl_id': crl_id,
'crl': crl,
})
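    def _example_import_crl(config, pem_text):
        """Hedged usage sketch (not part of the generated stub).

        Demonstrates the importcrl flow described above: wrap a PEM-encoded revocation
        list in the request payload type and push it into the repository. The
        `pem_encoded` field name on CrlObjectData is an assumption about
        com.vmware.nsx.model_client; only the Crls methods used here come from this
        module.
        """
        from com.vmware.nsx import model_client
        crls = Crls(config)
        crl_data = model_client.CrlObjectData(pem_encoded=pem_text)
        imported = crls.importcrl(crl_object_data=crl_data)
        # list everything afterwards to confirm the CRL is now in the repository
        return imported, crls.list(details=True)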
class Csrs(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.trust_management.csrs'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _CsrsStub)
self._VAPI_OPERATION_IDS = {}
def create(self,
csr,
):
"""
Creates a new certificate signing request (CSR). A CSR is encoded
text that contains information about your organization (organization
name, country, and so on) and your Web server's public key, which is a
public certificate that is generated on the server and can be used to
forward this request to a certificate authority (CA). A private key is
also usually created at the same time as the CSR.
:type csr: :class:`com.vmware.nsx.model_client.Csr`
:param csr: (required)
:rtype: :class:`com.vmware.nsx.model_client.Csr`
:return: com.vmware.nsx.model.Csr
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('create',
{
'csr': csr,
})
def delete(self,
csr_id,
):
"""
Removes a specified CSR. If a CSR is not used for verification, you can
delete it. Note that the CSR import and upload POST actions
automatically delete the associated CSR.
:type csr_id: :class:`str`
:param csr_id: ID of CSR to delete (required)
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('delete',
{
'csr_id': csr_id,
})
def get(self,
csr_id,
):
"""
Returns information about the specified CSR.
:type csr_id: :class:`str`
:param csr_id: ID of CSR to read (required)
:rtype: :class:`com.vmware.nsx.model_client.Csr`
:return: com.vmware.nsx.model.Csr
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get',
{
'csr_id': csr_id,
})
def importcsr(self,
csr_id,
trust_object_data,
):
"""
Imports a certificate authority (CA)-signed certificate for a CSR. This
action links the certificate to the private key created by the CSR. The
pem_encoded string in the request body is the signed certificate
provided by your CA in response to the CSR that you provide to them.
The import POST action automatically deletes the associated CSR.
:type csr_id: :class:`str`
:param csr_id: CSR this certificate is associated with (required)
:type trust_object_data: :class:`com.vmware.nsx.model_client.TrustObjectData`
:param trust_object_data: (required)
:rtype: :class:`com.vmware.nsx.model_client.CertificateList`
:return: com.vmware.nsx.model.CertificateList
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('importcsr',
{
'csr_id': csr_id,
'trust_object_data': trust_object_data,
})
def list(self,
cursor=None,
included_fields=None,
page_size=None,
sort_ascending=None,
sort_by=None,
):
"""
Returns information about all of the CSRs that have been created.
:type cursor: :class:`str` or ``None``
:param cursor: Opaque cursor to be used for getting next page of records (supplied
by current result page) (optional)
:type included_fields: :class:`str` or ``None``
:param included_fields: Comma separated list of fields that should be included in query
result (optional)
:type page_size: :class:`long` or ``None``
:param page_size: Maximum number of results to return in this page (server may return
fewer) (optional, default to 1000)
:type sort_ascending: :class:`bool` or ``None``
:param sort_ascending: (optional)
:type sort_by: :class:`str` or ``None``
:param sort_by: Field by which records are sorted (optional)
:rtype: :class:`com.vmware.nsx.model_client.CsrList`
:return: com.vmware.nsx.model.CsrList
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('list',
{
'cursor': cursor,
'included_fields': included_fields,
'page_size': page_size,
'sort_ascending': sort_ascending,
'sort_by': sort_by,
})
def selfsign(self,
csr_id,
days_valid,
):
"""
Self-signs the previously generated CSR. This action is similar to the
import certificate action, but instead of using a public certificate
signed by a CA, the self_sign POST action uses a certificate that is
signed with NSX's own private key.
:type csr_id: :class:`str`
:param csr_id: CSR this certificate is associated with (required)
:type days_valid: :class:`long`
:param days_valid: Number of days the certificate will be valid, default 10 years
(required)
:rtype: :class:`com.vmware.nsx.model_client.Certificate`
:return: com.vmware.nsx.model.Certificate
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('selfsign',
{
'csr_id': csr_id,
'days_valid': days_valid,
})
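    def _example_create_and_self_sign_csr(config, csr_spec):
        """Hedged usage sketch (not part of the generated stub).

        Ties together the create and selfsign operations documented above: generate a
        CSR (and its private key) on the manager, then self-sign it instead of sending
        it to an external CA. `csr_spec` is assumed to be a populated
        com.vmware.nsx.model_client.Csr instance, and the `id` attribute on the returned
        binding is an assumption about the model bindings.
        """
        csrs = Csrs(config)
        created = csrs.create(csr=csr_spec)
        # self-sign the pending request; days_valid is required by the API
        return csrs.selfsign(csr_id=created.id, days_valid=365)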
class PrincipalIdentities(VapiInterface):
"""
"""
_VAPI_SERVICE_ID = 'com.vmware.nsx.trust_management.principal_identities'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _PrincipalIdentitiesStub)
self._VAPI_OPERATION_IDS = {}
def create(self,
principal_identity,
):
"""
Associates a principal's name with a certificate that is used to
authenticate. Deprecated, use POST
/trust-management/principal-identities/with-certificate instead.
:type principal_identity: :class:`com.vmware.nsx.model_client.PrincipalIdentity`
:param principal_identity: (required)
:rtype: :class:`com.vmware.nsx.model_client.PrincipalIdentity`
:return: com.vmware.nsx.model.PrincipalIdentity
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('create',
{
'principal_identity': principal_identity,
})
def delete(self,
principal_identity_id,
):
"""
Delete a principal identity. It does not delete the certificate.
:type principal_identity_id: :class:`str`
:param principal_identity_id: Unique id of the principal identity to delete (required)
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('delete',
{
'principal_identity_id': principal_identity_id,
})
def get(self,
principal_identity_id,
):
"""
Get a stored principal identity
:type principal_identity_id: :class:`str`
:param principal_identity_id: ID of Principal Identity to get (required)
:rtype: :class:`com.vmware.nsx.model_client.PrincipalIdentity`
:return: com.vmware.nsx.model.PrincipalIdentity
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('get',
{
'principal_identity_id': principal_identity_id,
})
def list(self):
"""
Returns the list of principals registered with a certificate.
:rtype: :class:`com.vmware.nsx.model_client.PrincipalIdentityList`
:return: com.vmware.nsx.model.PrincipalIdentityList
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('list', None)
def updatecertificate(self,
update_principal_identity_certificate_request,
):
"""
Update a principal identity's certificate
:type update_principal_identity_certificate_request: :class:`com.vmware.nsx.model_client.UpdatePrincipalIdentityCertificateRequest`
:param update_principal_identity_certificate_request: (required)
:rtype: :class:`com.vmware.nsx.model_client.PrincipalIdentity`
:return: com.vmware.nsx.model.PrincipalIdentity
:raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
Service Unavailable
:raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
Bad Request, Precondition Failed
:raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
Internal Server Error
:raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
Forbidden
:raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
Not Found
"""
return self._invoke('updatecertificate',
{
'update_principal_identity_certificate_request': update_principal_identity_certificate_request,
})
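    def _example_list_principal_identities(config):
        """Hedged usage sketch (not part of the generated stub).

        Lists the principal identities registered with a certificate, using only the
        methods defined on PrincipalIdentities above. The `results` and `id` attributes
        on the returned PrincipalIdentityList binding are assumptions about
        com.vmware.nsx.model_client.
        """
        principal_identities = PrincipalIdentities(config)
        identity_list = principal_identities.list()
        for identity in identity_list.results:
            # each entry can be re-fetched individually by its id
            principal_identities.get(principal_identity_id=identity.id)
        return identity_list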
class _CertificatesStub(ApiInterfaceStub):
def __init__(self, config):
# properties for delete operation
delete_input_type = type.StructType('operation-input', {
'cert_id': type.StringType(),
})
delete_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
delete_input_value_validator_list = [
]
delete_output_validator_list = [
]
delete_rest_metadata = OperationRestMetadata(
http_method='DELETE',
url_template='/api/v1/trust-management/certificates/{cert-id}',
path_variables={
'cert_id': 'cert-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for get operation
get_input_type = type.StructType('operation-input', {
'cert_id': type.StringType(),
'details': type.OptionalType(type.BooleanType()),
})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/trust-management/certificates/{cert-id}',
path_variables={
'cert_id': 'cert-id',
},
query_parameters={
'details': 'details',
},
content_type='application/json'
)
# properties for importcertificate operation
importcertificate_input_type = type.StructType('operation-input', {
'trust_object_data': type.ReferenceType('com.vmware.nsx.model_client', 'TrustObjectData'),
})
importcertificate_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
importcertificate_input_value_validator_list = [
]
importcertificate_output_validator_list = [
]
importcertificate_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/trust-management/certificates?action=import',
request_body_parameter='trust_object_data',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for list operation
list_input_type = type.StructType('operation-input', {
'cursor': type.OptionalType(type.StringType()),
'details': type.OptionalType(type.BooleanType()),
'included_fields': type.OptionalType(type.StringType()),
'page_size': type.OptionalType(type.IntegerType()),
'sort_ascending': type.OptionalType(type.BooleanType()),
'sort_by': type.OptionalType(type.StringType()),
'type': type.OptionalType(type.StringType()),
})
list_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/trust-management/certificates',
path_variables={
},
query_parameters={
'cursor': 'cursor',
'details': 'details',
'included_fields': 'included_fields',
'page_size': 'page_size',
'sort_ascending': 'sort_ascending',
'sort_by': 'sort_by',
'type': 'type',
},
content_type='application/json'
)
operations = {
'delete': {
'input_type': delete_input_type,
'output_type': type.VoidType(),
'errors': delete_error_dict,
'input_value_validator_list': delete_input_value_validator_list,
'output_validator_list': delete_output_validator_list,
'task_type': TaskType.NONE,
},
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'Certificate'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'importcertificate': {
'input_type': importcertificate_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'CertificateList'),
'errors': importcertificate_error_dict,
'input_value_validator_list': importcertificate_input_value_validator_list,
'output_validator_list': importcertificate_output_validator_list,
'task_type': TaskType.NONE,
},
'list': {
'input_type': list_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'CertificateList'),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'delete': delete_rest_metadata,
'get': get_rest_metadata,
'importcertificate': importcertificate_rest_metadata,
'list': list_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.trust_management.certificates',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _CrlDistributionPointsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for create operation
create_input_type = type.StructType('operation-input', {
'crl_distribution_point': type.ReferenceType('com.vmware.nsx.model_client', 'CrlDistributionPoint'),
})
create_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
create_input_value_validator_list = [
]
create_output_validator_list = [
]
create_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/trust-management/crl-distribution-points',
request_body_parameter='crl_distribution_point',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for delete operation
delete_input_type = type.StructType('operation-input', {
'crl_distribution_point_id': type.StringType(),
})
delete_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
delete_input_value_validator_list = [
]
delete_output_validator_list = [
]
delete_rest_metadata = OperationRestMetadata(
http_method='DELETE',
url_template='/api/v1/trust-management/crl-distribution-points/{crl-distribution-point-id}',
path_variables={
'crl_distribution_point_id': 'crl-distribution-point-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for get operation
get_input_type = type.StructType('operation-input', {
'crl_distribution_point_id': type.StringType(),
})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/trust-management/crl-distribution-points/{crl-distribution-point-id}',
path_variables={
'crl_distribution_point_id': 'crl-distribution-point-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for list operation
list_input_type = type.StructType('operation-input', {
'cursor': type.OptionalType(type.StringType()),
'included_fields': type.OptionalType(type.StringType()),
'page_size': type.OptionalType(type.IntegerType()),
'sort_ascending': type.OptionalType(type.BooleanType()),
'sort_by': type.OptionalType(type.StringType()),
})
list_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/trust-management/crl-distribution-points',
path_variables={
},
query_parameters={
'cursor': 'cursor',
'included_fields': 'included_fields',
'page_size': 'page_size',
'sort_ascending': 'sort_ascending',
'sort_by': 'sort_by',
},
content_type='application/json'
)
# properties for update operation
update_input_type = type.StructType('operation-input', {
'crl_distribution_point_id': type.StringType(),
'crl_distribution_point': type.ReferenceType('com.vmware.nsx.model_client', 'CrlDistributionPoint'),
})
update_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
update_input_value_validator_list = [
]
update_output_validator_list = [
]
update_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/api/v1/trust-management/crl-distribution-points/{crl-distribution-point-id}',
request_body_parameter='crl_distribution_point',
path_variables={
'crl_distribution_point_id': 'crl-distribution-point-id',
},
query_parameters={
},
content_type='application/json'
)
operations = {
'create': {
'input_type': create_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'CrlDistributionPoint'),
'errors': create_error_dict,
'input_value_validator_list': create_input_value_validator_list,
'output_validator_list': create_output_validator_list,
'task_type': TaskType.NONE,
},
'delete': {
'input_type': delete_input_type,
'output_type': type.VoidType(),
'errors': delete_error_dict,
'input_value_validator_list': delete_input_value_validator_list,
'output_validator_list': delete_output_validator_list,
'task_type': TaskType.NONE,
},
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'CrlDistributionPoint'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'list': {
'input_type': list_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'CrlDistributionPointList'),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
'update': {
'input_type': update_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'CrlDistributionPoint'),
'errors': update_error_dict,
'input_value_validator_list': update_input_value_validator_list,
'output_validator_list': update_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'create': create_rest_metadata,
'delete': delete_rest_metadata,
'get': get_rest_metadata,
'list': list_rest_metadata,
'update': update_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.trust_management.crl_distribution_points',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _CrlsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for delete operation
delete_input_type = type.StructType('operation-input', {
'crl_id': type.StringType(),
})
delete_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
delete_input_value_validator_list = [
]
delete_output_validator_list = [
]
delete_rest_metadata = OperationRestMetadata(
http_method='DELETE',
url_template='/api/v1/trust-management/crls/{crl-id}',
path_variables={
'crl_id': 'crl-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for get operation
get_input_type = type.StructType('operation-input', {
'crl_id': type.StringType(),
'details': type.OptionalType(type.BooleanType()),
})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/trust-management/crls/{crl-id}',
path_variables={
'crl_id': 'crl-id',
},
query_parameters={
'details': 'details',
},
content_type='application/json'
)
# properties for importcrl operation
importcrl_input_type = type.StructType('operation-input', {
'crl_object_data': type.ReferenceType('com.vmware.nsx.model_client', 'CrlObjectData'),
})
importcrl_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
importcrl_input_value_validator_list = [
]
importcrl_output_validator_list = [
]
importcrl_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/trust-management/crls?action=import',
request_body_parameter='crl_object_data',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for list operation
list_input_type = type.StructType('operation-input', {
'cursor': type.OptionalType(type.StringType()),
'details': type.OptionalType(type.BooleanType()),
'included_fields': type.OptionalType(type.StringType()),
'page_size': type.OptionalType(type.IntegerType()),
'sort_ascending': type.OptionalType(type.BooleanType()),
'sort_by': type.OptionalType(type.StringType()),
'type': type.OptionalType(type.StringType()),
})
list_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/trust-management/crls',
path_variables={
},
query_parameters={
'cursor': 'cursor',
'details': 'details',
'included_fields': 'included_fields',
'page_size': 'page_size',
'sort_ascending': 'sort_ascending',
'sort_by': 'sort_by',
'type': 'type',
},
content_type='application/json'
)
# properties for update operation
update_input_type = type.StructType('operation-input', {
'crl_id': type.StringType(),
'crl': type.ReferenceType('com.vmware.nsx.model_client', 'Crl'),
})
update_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
update_input_value_validator_list = [
]
update_output_validator_list = [
]
update_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/api/v1/trust-management/crls/{crl-id}',
request_body_parameter='crl',
path_variables={
'crl_id': 'crl-id',
},
query_parameters={
},
content_type='application/json'
)
operations = {
'delete': {
'input_type': delete_input_type,
'output_type': type.VoidType(),
'errors': delete_error_dict,
'input_value_validator_list': delete_input_value_validator_list,
'output_validator_list': delete_output_validator_list,
'task_type': TaskType.NONE,
},
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'Crl'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'importcrl': {
'input_type': importcrl_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'CrlList'),
'errors': importcrl_error_dict,
'input_value_validator_list': importcrl_input_value_validator_list,
'output_validator_list': importcrl_output_validator_list,
'task_type': TaskType.NONE,
},
'list': {
'input_type': list_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'CrlList'),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
'update': {
'input_type': update_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'Crl'),
'errors': update_error_dict,
'input_value_validator_list': update_input_value_validator_list,
'output_validator_list': update_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'delete': delete_rest_metadata,
'get': get_rest_metadata,
'importcrl': importcrl_rest_metadata,
'list': list_rest_metadata,
'update': update_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.trust_management.crls',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _CsrsStub(ApiInterfaceStub):
def __init__(self, config):
# properties for create operation
create_input_type = type.StructType('operation-input', {
'csr': type.ReferenceType('com.vmware.nsx.model_client', 'Csr'),
})
create_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
create_input_value_validator_list = [
]
create_output_validator_list = [
]
create_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/trust-management/csrs',
request_body_parameter='csr',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for delete operation
delete_input_type = type.StructType('operation-input', {
'csr_id': type.StringType(),
})
delete_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
delete_input_value_validator_list = [
]
delete_output_validator_list = [
]
delete_rest_metadata = OperationRestMetadata(
http_method='DELETE',
url_template='/api/v1/trust-management/csrs/{csr-id}',
path_variables={
'csr_id': 'csr-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for get operation
get_input_type = type.StructType('operation-input', {
'csr_id': type.StringType(),
})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/trust-management/csrs/{csr-id}',
path_variables={
'csr_id': 'csr-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for importcsr operation
importcsr_input_type = type.StructType('operation-input', {
'csr_id': type.StringType(),
'trust_object_data': type.ReferenceType('com.vmware.nsx.model_client', 'TrustObjectData'),
})
importcsr_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
importcsr_input_value_validator_list = [
]
importcsr_output_validator_list = [
]
importcsr_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/trust-management/csrs/{csr-id}?action=import',
request_body_parameter='trust_object_data',
path_variables={
'csr_id': 'csr-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for list operation
list_input_type = type.StructType('operation-input', {
'cursor': type.OptionalType(type.StringType()),
'included_fields': type.OptionalType(type.StringType()),
'page_size': type.OptionalType(type.IntegerType()),
'sort_ascending': type.OptionalType(type.BooleanType()),
'sort_by': type.OptionalType(type.StringType()),
})
list_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/trust-management/csrs',
path_variables={
},
query_parameters={
'cursor': 'cursor',
'included_fields': 'included_fields',
'page_size': 'page_size',
'sort_ascending': 'sort_ascending',
'sort_by': 'sort_by',
},
content_type='application/json'
)
# properties for selfsign operation
selfsign_input_type = type.StructType('operation-input', {
'csr_id': type.StringType(),
'days_valid': type.IntegerType(),
})
selfsign_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
selfsign_input_value_validator_list = [
]
selfsign_output_validator_list = [
]
selfsign_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/trust-management/csrs/{csr-id}?action=self_sign',
path_variables={
'csr_id': 'csr-id',
},
query_parameters={
'days_valid': 'days_valid',
},
content_type='application/json'
)
operations = {
'create': {
'input_type': create_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'Csr'),
'errors': create_error_dict,
'input_value_validator_list': create_input_value_validator_list,
'output_validator_list': create_output_validator_list,
'task_type': TaskType.NONE,
},
'delete': {
'input_type': delete_input_type,
'output_type': type.VoidType(),
'errors': delete_error_dict,
'input_value_validator_list': delete_input_value_validator_list,
'output_validator_list': delete_output_validator_list,
'task_type': TaskType.NONE,
},
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'Csr'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'importcsr': {
'input_type': importcsr_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'CertificateList'),
'errors': importcsr_error_dict,
'input_value_validator_list': importcsr_input_value_validator_list,
'output_validator_list': importcsr_output_validator_list,
'task_type': TaskType.NONE,
},
'list': {
'input_type': list_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'CsrList'),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
'selfsign': {
'input_type': selfsign_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'Certificate'),
'errors': selfsign_error_dict,
'input_value_validator_list': selfsign_input_value_validator_list,
'output_validator_list': selfsign_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'create': create_rest_metadata,
'delete': delete_rest_metadata,
'get': get_rest_metadata,
'importcsr': importcsr_rest_metadata,
'list': list_rest_metadata,
'selfsign': selfsign_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.trust_management.csrs',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class _PrincipalIdentitiesStub(ApiInterfaceStub):
def __init__(self, config):
# properties for create operation
create_input_type = type.StructType('operation-input', {
'principal_identity': type.ReferenceType('com.vmware.nsx.model_client', 'PrincipalIdentity'),
})
create_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
create_input_value_validator_list = [
]
create_output_validator_list = [
]
create_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/trust-management/principal-identities',
request_body_parameter='principal_identity',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for delete operation
delete_input_type = type.StructType('operation-input', {
'principal_identity_id': type.StringType(),
})
delete_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
delete_input_value_validator_list = [
]
delete_output_validator_list = [
]
delete_rest_metadata = OperationRestMetadata(
http_method='DELETE',
url_template='/api/v1/trust-management/principal-identities/{principal-identity-id}',
path_variables={
'principal_identity_id': 'principal-identity-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for get operation
get_input_type = type.StructType('operation-input', {
'principal_identity_id': type.StringType(),
})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/trust-management/principal-identities/{principal-identity-id}',
path_variables={
'principal_identity_id': 'principal-identity-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for list operation
list_input_type = type.StructType('operation-input', {})
list_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/trust-management/principal-identities',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for updatecertificate operation
updatecertificate_input_type = type.StructType('operation-input', {
'update_principal_identity_certificate_request': type.ReferenceType('com.vmware.nsx.model_client', 'UpdatePrincipalIdentityCertificateRequest'),
})
updatecertificate_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
updatecertificate_input_value_validator_list = [
]
updatecertificate_output_validator_list = [
]
updatecertificate_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/trust-management/principal-identities?action=update_certificate',
request_body_parameter='update_principal_identity_certificate_request',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
operations = {
'create': {
'input_type': create_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'PrincipalIdentity'),
'errors': create_error_dict,
'input_value_validator_list': create_input_value_validator_list,
'output_validator_list': create_output_validator_list,
'task_type': TaskType.NONE,
},
'delete': {
'input_type': delete_input_type,
'output_type': type.VoidType(),
'errors': delete_error_dict,
'input_value_validator_list': delete_input_value_validator_list,
'output_validator_list': delete_output_validator_list,
'task_type': TaskType.NONE,
},
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'PrincipalIdentity'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'list': {
'input_type': list_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'PrincipalIdentityList'),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
'updatecertificate': {
'input_type': updatecertificate_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'PrincipalIdentity'),
'errors': updatecertificate_error_dict,
'input_value_validator_list': updatecertificate_input_value_validator_list,
'output_validator_list': updatecertificate_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'create': create_rest_metadata,
'delete': delete_rest_metadata,
'get': get_rest_metadata,
'list': list_rest_metadata,
'updatecertificate': updatecertificate_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.trust_management.principal_identities',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class StubFactory(StubFactoryBase):
_attrs = {
'Certificates': Certificates,
'CrlDistributionPoints': CrlDistributionPoints,
'Crls': Crls,
'Csrs': Csrs,
'PrincipalIdentities': PrincipalIdentities,
'crl_distribution_points': 'com.vmware.nsx.trust_management.crl_distribution_points_client.StubFactory',
'principal_identities': 'com.vmware.nsx.trust_management.principal_identities_client.StubFactory',
}
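# --- Hedged usage sketch (added for illustration; not generated code) -------
# The public service classes registered in StubFactory above wrap the private
# stubs defined in this module. Assuming `stub_config` is an already
# authenticated vAPI StubConfiguration (its construction is an assumption and
# is omitted here), CRL operations would look roughly like this:
#
#     from com.vmware.nsx import trust_management_client
#     crls_svc = trust_management_client.Crls(stub_config)
#     page = crls_svc.list(page_size=50)                 # GET  .../trust-management/crls
#     crl = crls_svc.get(crl_id='example-crl-id', details=True)
#     crls_svc.delete(crl_id='example-crl-id')           # DELETE .../crls/{crl-id}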
| 44.40572
| 156
| 0.596492
| 9,069
| 91,609
| 5.798434
| 0.039365
| 0.079755
| 0.092705
| 0.114099
| 0.909101
| 0.893394
| 0.872837
| 0.856445
| 0.847393
| 0.839558
| 0
| 0.000745
| 0.296554
| 91,609
| 2,062
| 157
| 44.427255
| 0.815276
| 0.270388
| 0
| 0.727135
| 1
| 0.002268
| 0.322016
| 0.227404
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026455
| false
| 0
| 0.040816
| 0
| 0.100529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0c5c225bea97b848df7068538bc1df5271634638
| 10,326
|
py
|
Python
|
tests/test_rundramatiq_command.py
|
BradleyKirton/django_dramatiq
|
93a4a9ae39aee643cc4a987b18030ad8d1fc8480
|
[
"Apache-2.0"
] | null | null | null |
tests/test_rundramatiq_command.py
|
BradleyKirton/django_dramatiq
|
93a4a9ae39aee643cc4a987b18030ad8d1fc8480
|
[
"Apache-2.0"
] | null | null | null |
tests/test_rundramatiq_command.py
|
BradleyKirton/django_dramatiq
|
93a4a9ae39aee643cc4a987b18030ad8d1fc8480
|
[
"Apache-2.0"
] | null | null | null |
import os
import sys
from io import StringIO
from unittest.mock import patch
from django.core.management import call_command
from django_dramatiq.management.commands import rundramatiq
def test_rundramatiq_command_autodiscovers_modules():
assert rundramatiq.Command().discover_tasks_modules() == [
"django_dramatiq.setup",
"django_dramatiq.tasks",
"tests.testapp1.tasks",
"tests.testapp2.tasks",
"tests.testapp3.tasks.other_tasks",
"tests.testapp3.tasks.tasks",
"tests.testapp3.tasks.utils",
"tests.testapp3.tasks.utils.not_a_task",
]
@patch("os.execvp")
def test_rundramatiq_can_run_dramatiq(execvp_mock):
# Given an output buffer
buff = StringIO()
# When I call the rundramatiq command
call_command("rundramatiq", stdout=buff)
# Then stdout should contain a message about discovered task modules
assert "Discovered tasks module: 'tests.testapp1.tasks'" in buff.getvalue()
assert "Discovered tasks module: 'tests.testapp2.tasks'" in buff.getvalue()
assert "Discovered tasks module: 'tests.testapp3.tasks.tasks'" in buff.getvalue()
assert "Discovered tasks module: 'tests.testapp3.tasks.other_tasks'" in buff.getvalue()
# And execvp should be called with the appropriate arguments
cores = str(rundramatiq.CPU_COUNT)
expected_exec_name = "dramatiq"
expected_exec_path = os.path.join(
os.path.dirname(sys.executable),
expected_exec_name,
)
execvp_mock.assert_called_once_with(expected_exec_path, [
expected_exec_name, "--path", ".", "--processes", cores, "--threads", cores,
"django_dramatiq.setup",
"django_dramatiq.tasks",
"tests.testapp1.tasks",
"tests.testapp2.tasks",
"tests.testapp3.tasks.other_tasks",
"tests.testapp3.tasks.tasks",
"tests.testapp3.tasks.utils",
"tests.testapp3.tasks.utils.not_a_task",
])
@patch("os.execvp")
def test_rundramatiq_can_run_dramatiq_reload(execvp_mock):
# Given an output buffer
buff = StringIO()
# When I call the rundramatiq command with --reload
call_command("rundramatiq", "--reload", stdout=buff)
# Then execvp should be called with the appropriate arguments
cores = str(rundramatiq.CPU_COUNT)
expected_exec_name = "dramatiq"
expected_exec_path = os.path.join(
os.path.dirname(sys.executable),
expected_exec_name,
)
execvp_mock.assert_called_once_with(expected_exec_path, [
expected_exec_name, "--path", ".", "--processes", cores, "--threads", cores,
"--watch", ".",
"django_dramatiq.setup",
"django_dramatiq.tasks",
"tests.testapp1.tasks",
"tests.testapp2.tasks",
"tests.testapp3.tasks.other_tasks",
"tests.testapp3.tasks.tasks",
"tests.testapp3.tasks.utils",
"tests.testapp3.tasks.utils.not_a_task",
])
@patch("os.execvp")
def test_rundramatiq_can_run_dramatiq_with_polling(execvp_mock):
# Given an output buffer
buff = StringIO()
# When I call the rundramatiq command with --reload-use-polling
call_command("rundramatiq", "--reload", "--reload-use-polling", stdout=buff)
# Then execvp should be called with the appropriate arguments
cores = str(rundramatiq.CPU_COUNT)
expected_exec_name = "dramatiq"
expected_exec_path = os.path.join(
os.path.dirname(sys.executable),
expected_exec_name,
)
execvp_mock.assert_called_once_with(expected_exec_path, [
expected_exec_name, "--path", ".", "--processes", cores, "--threads", cores,
"--watch", ".",
"--watch-use-polling",
"django_dramatiq.setup",
"django_dramatiq.tasks",
"tests.testapp1.tasks",
"tests.testapp2.tasks",
"tests.testapp3.tasks.other_tasks",
"tests.testapp3.tasks.tasks",
"tests.testapp3.tasks.utils",
"tests.testapp3.tasks.utils.not_a_task",
])
@patch("os.execvp")
def test_rundramatiq_can_run_dramatiq_with_only_some_queues(execvp_mock):
# Given an output buffer
buff = StringIO()
# When I call the rundramatiq command with --queues
call_command("rundramatiq", "--queues", "A B C", stdout=buff)
# Then execvp should be called with the appropriate arguments
cores = str(rundramatiq.CPU_COUNT)
expected_exec_name = "dramatiq"
expected_exec_path = os.path.join(
os.path.dirname(sys.executable),
expected_exec_name,
)
execvp_mock.assert_called_once_with(expected_exec_path, [
expected_exec_name, "--path", ".", "--processes", cores, "--threads", cores,
"django_dramatiq.setup",
"django_dramatiq.tasks",
"tests.testapp1.tasks",
"tests.testapp2.tasks",
"tests.testapp3.tasks.other_tasks",
"tests.testapp3.tasks.tasks",
"tests.testapp3.tasks.utils",
"tests.testapp3.tasks.utils.not_a_task",
"--queues", "A B C"
])
@patch("os.execvp")
def test_rundramatiq_can_run_dramatiq_with_specified_pid_file(execvp_mock):
# Given an output buffer
buff = StringIO()
# When I call the rundramatiq command with --pid-file
call_command("rundramatiq", "--pid-file", "drama.pid", stdout=buff)
# Then execvp should be called with the appropriate arguments
cores = str(rundramatiq.CPU_COUNT)
expected_exec_name = "dramatiq"
expected_exec_path = os.path.join(
os.path.dirname(sys.executable),
expected_exec_name,
)
execvp_mock.assert_called_once_with(expected_exec_path, [
expected_exec_name, "--path", ".", "--processes", cores, "--threads", cores,
"django_dramatiq.setup",
"django_dramatiq.tasks",
"tests.testapp1.tasks",
"tests.testapp2.tasks",
"tests.testapp3.tasks.other_tasks",
"tests.testapp3.tasks.tasks",
"tests.testapp3.tasks.utils",
"tests.testapp3.tasks.utils.not_a_task",
"--pid-file", "drama.pid"
])
@patch("os.execvp")
def test_rundramatiq_can_run_dramatiq_with_specified_log_file(execvp_mock):
# Given an output buffer
buff = StringIO()
# When I call the rundramatiq command with --log-file
call_command("rundramatiq", "--log-file", "drama.log", stdout=buff)
# Then execvp should be called with the appropriate arguments
cores = str(rundramatiq.CPU_COUNT)
expected_exec_name = "dramatiq"
expected_exec_path = os.path.join(
os.path.dirname(sys.executable),
expected_exec_name,
)
execvp_mock.assert_called_once_with(expected_exec_path, [
expected_exec_name, "--path", ".", "--processes", cores, "--threads", cores,
"django_dramatiq.setup",
"django_dramatiq.tasks",
"tests.testapp1.tasks",
"tests.testapp2.tasks",
"tests.testapp3.tasks.other_tasks",
"tests.testapp3.tasks.tasks",
"tests.testapp3.tasks.utils",
"tests.testapp3.tasks.utils.not_a_task",
"--log-file", "drama.log"
])
@patch("os.execvp")
def test_rundramatiq_can_ignore_modules(execvp_mock, settings):
# Given an output buffer
buff = StringIO()
# And 'tests.testapp2.tasks', 'tests.testapp3.tasks.other_tasks' and the
# 'tests.testapp3.tasks.utils.*' pattern are in DRAMATIQ_IGNORED_MODULES
settings.DRAMATIQ_IGNORED_MODULES = (
"tests.testapp2.tasks",
"tests.testapp3.tasks.other_tasks",
"tests.testapp3.tasks.utils.*",
)
# When I call the rundramatiq command
call_command("rundramatiq", stdout=buff)
# Then stdout should report both discovered and ignored task modules
assert "Discovered tasks module: 'tests.testapp1.tasks'" in buff.getvalue()
assert "Discovered tasks module: 'tests.testapp3.tasks.tasks'" in buff.getvalue()
assert "Discovered tasks module: 'tests.testapp3.tasks.utils'" in buff.getvalue()
assert "Ignored tasks module: 'tests.testapp2.tasks'" in buff.getvalue()
assert "Ignored tasks module: 'tests.testapp3.tasks.other_tasks'" in buff.getvalue()
assert "Ignored tasks module: 'tests.testapp3.tasks.utils.not_a_task'" in buff.getvalue()
# And execvp should be called with the appropriate arguments
cores = str(rundramatiq.CPU_COUNT)
expected_exec_name = "dramatiq"
expected_exec_path = os.path.join(
os.path.dirname(sys.executable),
expected_exec_name,
)
execvp_mock.assert_called_once_with(expected_exec_path, [
expected_exec_name, "--path", ".", "--processes", cores, "--threads", cores,
"django_dramatiq.setup",
"django_dramatiq.tasks",
"tests.testapp1.tasks",
"tests.testapp3.tasks.tasks",
"tests.testapp3.tasks.utils",
])
@patch("os.execvp")
def test_rundramatiq_can_fork(execvp_mock, settings):
# Given an output buffer
buff = StringIO()
# When I call the rundramatiq command with --fork-function
call_command("rundramatiq", "--fork-function", "a", "--fork-function", "b", stdout=buff)
# Then execvp should be called with the appropriate arguments
cores = str(rundramatiq.CPU_COUNT)
expected_exec_name = "dramatiq"
expected_exec_path = os.path.join(
os.path.dirname(sys.executable),
expected_exec_name,
)
execvp_mock.assert_called_once_with(expected_exec_path, [
expected_exec_name, "--path", ".", "--processes", cores, "--threads", cores,
"--fork-function", "a",
"--fork-function", "b",
"django_dramatiq.setup",
"django_dramatiq.tasks",
"tests.testapp1.tasks",
"tests.testapp2.tasks",
"tests.testapp3.tasks.other_tasks",
"tests.testapp3.tasks.tasks",
"tests.testapp3.tasks.utils",
"tests.testapp3.tasks.utils.not_a_task",
])
def test_rundramatiq_command_autodiscovers_additional_modules(settings):
settings.DRAMATIQ_AUTODISCOVER_MODULES = ("services", )
assert rundramatiq.Command().discover_tasks_modules() == [
"django_dramatiq.setup",
"django_dramatiq.tasks",
"tests.testapp1.tasks",
"tests.testapp2.tasks",
"tests.testapp3.tasks.other_tasks",
"tests.testapp3.tasks.tasks",
"tests.testapp3.tasks.utils",
"tests.testapp3.tasks.utils.not_a_task",
"tests.testapp4.services",
]
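# For reference, a minimal sketch of what one of the discovered modules (for
# example "tests.testapp1.tasks") could contain. The actor below is
# hypothetical and is kept in a comment so it adds no module-level side
# effects to this test file:
#
#     import dramatiq
#
#     @dramatiq.actor
#     def process_order(order_id):
#         # django_dramatiq configures the broker from settings; rundramatiq
#         # then exec's the dramatiq CLI with this module path.
#         print("processing order", order_id)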
| 35.122449
| 93
| 0.673833
| 1,228
| 10,326
| 5.456026
| 0.079805
| 0.074627
| 0.126269
| 0.106418
| 0.92
| 0.900299
| 0.887313
| 0.875224
| 0.871194
| 0.850299
| 0
| 0.008846
| 0.200852
| 10,326
| 294
| 94
| 35.122449
| 0.803078
| 0.127445
| 0
| 0.775229
| 0
| 0
| 0.361723
| 0.214095
| 0
| 0
| 0
| 0
| 0.091743
| 1
| 0.045872
| false
| 0
| 0.027523
| 0
| 0.073395
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7d4fbe6bb2f93758e5bbd91a20e5e655a876720
| 7,386
|
py
|
Python
|
test_undirect_graf.py
|
rodrigondec/Grafos
|
dd3bb7ffd56909395cc211c6d68f9e3eaa5fa9ba
|
[
"Unlicense"
] | null | null | null |
test_undirect_graf.py
|
rodrigondec/Grafos
|
dd3bb7ffd56909395cc211c6d68f9e3eaa5fa9ba
|
[
"Unlicense"
] | null | null | null |
test_undirect_graf.py
|
rodrigondec/Grafos
|
dd3bb7ffd56909395cc211c6d68f9e3eaa5fa9ba
|
[
"Unlicense"
] | null | null | null |
from grafo import Grafo, DiGrafo
from no import No
from aresta import Aresta
import unittest
class TestStringMethods(unittest.TestCase):
def setUp(self):
self.grafo = Grafo()
def test_atingivel(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.grafo.insertNo(No(7))
self.assertEqual(len(self.grafo.nos), 7)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 2))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(3, 4))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 7)
self.assertEqual(self.grafo.atingivel(1, 6), True)
self.assertEqual(self.grafo.atingivel(1, 7), False)
def test_caminho(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.grafo.insertNo(No(7))
self.assertEqual(len(self.grafo.nos), 7)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 2))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(3, 4))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 7)
self.assertEqual(self.grafo.caminho(1, 6), [1, 5, 4, 6])
self.assertEqual(self.grafo.caminho(1, 3), [1, 2, 3])
def test_conexo(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.assertEqual(len(self.grafo.nos), 6)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 2))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(3, 4))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 7)
self.assertEqual(self.grafo.conexo(), True)
self.grafo.insertNo(No(7))
self.assertEqual(len(self.grafo.nos), 7)
self.assertEqual(self.grafo.conexo(), False)
def test_ciclico_true(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.assertEqual(len(self.grafo.nos), 6)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 2))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(3, 4))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 7)
self.assertEqual(self.grafo.ciclico(), True)
def test_ciclico_false(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.assertEqual(len(self.grafo.nos), 6)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 5)
print(self.grafo)
self.assertEqual(self.grafo.ciclico(), False)
def test_ciclico_n_conexo_true(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.grafo.insertNo(No(7))
self.grafo.insertNo(No(8))
self.grafo.insertNo(No(9))
self.grafo.insertNo(No(10))
self.assertEqual(len(self.grafo.nos), 10)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(7, 6))
self.grafo.insertAresta(Aresta(8, 9))
self.grafo.insertAresta(Aresta(9, 10))
self.grafo.insertAresta(Aresta(8, 10))
self.assertEqual(len(self.grafo.arestas), 8)
self.assertEqual(self.grafo.ciclico(), True)
def test_ciclico_n_conexo_false(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.grafo.insertNo(No(7))
self.grafo.insertNo(No(8))
self.grafo.insertNo(No(9))
self.grafo.insertNo(No(10))
self.assertEqual(len(self.grafo.nos), 10)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(7, 6))
self.grafo.insertAresta(Aresta(8, 9))
self.grafo.insertAresta(Aresta(9, 10))
self.assertEqual(len(self.grafo.arestas), 7)
self.assertEqual(self.grafo.ciclico(), False)
def test_num_componentes(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.assertEqual(len(self.grafo.nos), 5)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.assertEqual(len(self.grafo.arestas), 4)
self.assertEqual(self.grafo.num_componentes(), 1)
self.grafo.insertNo(No(6))
self.grafo.insertNo(No(7))
self.assertEqual(len(self.grafo.nos), 7)
self.grafo.insertAresta(Aresta(7, 6))
self.assertEqual(len(self.grafo.arestas), 5)
self.assertEqual(self.grafo.num_componentes(), 2)
self.grafo.insertNo(No(8))
self.grafo.insertNo(No(9))
self.grafo.insertNo(No(10))
self.assertEqual(len(self.grafo.nos), 10)
self.grafo.insertAresta(Aresta(8, 9))
self.grafo.insertAresta(Aresta(9, 10))
self.grafo.insertAresta(Aresta(8, 10))
self.assertEqual(len(self.grafo.arestas), 8)
self.assertEqual(self.grafo.num_componentes(), 3)
def test_bfs(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.assertEqual(len(self.grafo.nos), 6)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 2))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(3, 4))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 7)
self.grafo.bfs(1)
def test_dfs(self):
self.grafo.insertNo(No(1))
self.grafo.insertNo(No(2))
self.grafo.insertNo(No(3))
self.grafo.insertNo(No(4))
self.grafo.insertNo(No(5))
self.grafo.insertNo(No(6))
self.assertEqual(len(self.grafo.nos), 6)
self.grafo.insertAresta(Aresta(1, 2))
self.grafo.insertAresta(Aresta(1, 5))
self.grafo.insertAresta(Aresta(5, 2))
self.grafo.insertAresta(Aresta(5, 4))
self.grafo.insertAresta(Aresta(2, 3))
self.grafo.insertAresta(Aresta(3, 4))
self.grafo.insertAresta(Aresta(4, 6))
self.assertEqual(len(self.grafo.arestas), 7)
self.grafo.dfs(1)
if __name__ == '__main__':
unittest.main()
| 29.544
| 58
| 0.716084
| 1,154
| 7,386
| 4.557192
| 0.038995
| 0.320023
| 0.242442
| 0.270964
| 0.941624
| 0.93706
| 0.887241
| 0.887051
| 0.869557
| 0.86138
| 0
| 0.04103
| 0.105741
| 7,386
| 250
| 59
| 29.544
| 0.755185
| 0
| 0
| 0.839024
| 0
| 0
| 0.001083
| 0
| 0
| 0
| 0
| 0
| 0.185366
| 0
| null | null | 0
| 0.019512
| null | null | 0.004878
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
ac1b3ebd8ffb64179ceb128585149b4d27bf039c
| 575
|
py
|
Python
|
content_f_conditions/ex45_multiple_conditions.py
|
Alyssonmach/learning-python-with-codes
|
e5ef70f3b56712e98449b3053eb34416b8025cb1
|
[
"MIT"
] | 3
|
2020-11-28T08:26:54.000Z
|
2020-12-23T18:37:37.000Z
|
content_f_conditions/ex45_multiple_conditions.py
|
Alyssonmach/learning-python-with-codes
|
e5ef70f3b56712e98449b3053eb34416b8025cb1
|
[
"MIT"
] | 1
|
2021-02-12T12:17:49.000Z
|
2021-02-12T12:17:49.000Z
|
content_f_conditions/ex45_multiple_conditions.py
|
Alyssonmach/learning-python-with-codes
|
e5ef70f3b56712e98449b3053eb34416b8025cb1
|
[
"MIT"
] | null | null | null |
requested_toppings = ['mushrooms', 'extra cheese']
if 'mushrooms' in requested_toppings:
print("Adding mushrooms.")
if 'pepperoni' in requested_toppings:
print("Adding pepperoni.")
if 'extra cheese' in requested_toppings:
print("Adding extra cheese.")
print("\nFinished making your first pizza!")
if 'mushrooms' in requested_toppings:
print("Adding mushrooms.")
elif 'pepperoni' in requested_toppings:
print("Adding pepperoni.")
elif 'extra cheese' in requested_toppings:
print("Adding extra cheese.")
print("\nFinished making your second pizza!")
| 30.263158
| 50
| 0.73913
| 69
| 575
| 6.057971
| 0.246377
| 0.284689
| 0.272727
| 0.344498
| 0.832536
| 0.832536
| 0.832536
| 0.602871
| 0.363636
| 0.363636
| 0
| 0
| 0.142609
| 575
| 19
| 51
| 30.263158
| 0.84787
| 0
| 0
| 0.533333
| 0
| 0
| 0.451389
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.533333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
ac3ca086610d59e10a3cca75b54708abf363a598
| 6,194
|
py
|
Python
|
Script/WDI_writer_functions.py
|
Riemer1818/Cattlelyst_wikibase_2021
|
1f3e3199391844206e6621e63756461bf984bf36
|
[
"MIT"
] | null | null | null |
Script/WDI_writer_functions.py
|
Riemer1818/Cattlelyst_wikibase_2021
|
1f3e3199391844206e6621e63756461bf984bf36
|
[
"MIT"
] | null | null | null |
Script/WDI_writer_functions.py
|
Riemer1818/Cattlelyst_wikibase_2021
|
1f3e3199391844206e6621e63756461bf984bf36
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from wikidataintegrator import wdi_core, wdi_login
import logging
import pickle
__author__ = "Riemer van der Vliet"
__copyright__ = "Copyright 2020, Laboratory of Systems and Synthetic Biology"
__credits__ = ["Riemer van der Vliet", "Jasper Koehorst"]
__license__ = "GPL"
__version__ = "2.0.0"
__maintainer__ = "Riemer van der Vliet"
__email__ = "riemer.vandervliet@wur.nl"
__status__ = "Development"
"""
functions used by WDI writer file
"""
def get_properties(endpoint_url: str) -> dict:
"""Finds properties on the endpoint url and returns the IDs
:param endpoint_url: Wikibase SPARQL endpoint
:return: Property lookup dictionary of key property string and value property ID of Wikibase
"""
# placeholder for dictionary
property_lookup = {}
# creates query
query = """SELECT ?property ?label WHERE {
?property a wikibase:Property .
?property rdfs:label ?label .
FILTER (LANG(?label) = "en" )}
"""
# gets results
results = wdi_core.WDItemEngine.execute_sparql_query(query=query, endpoint=endpoint_url)
# iterate over the query results
for result in results["results"]["bindings"]:
label = result["label"]["value"].split("/")[-1]
property_lookup[label] = result["property"]["value"].split("/")[-1]
return property_lookup
def get_items(items: list, endpoint_url: str) -> dict:
"""Gets the IDs for each of the items in the item list. First tries to find it in the pickle file.
:param items: list of items of which IDs need to be traced
:param endpoint_url: Wikibase SPARQL endpoint
:return: item_lookup dictionary of key item string and value item ID of Wikibase
"""
if os.path.isfile("../Parts/item_lookup.pickle"):
with open('../Parts/item_lookup.pickle', 'rb') as handle:
item_lookup = pickle.load(handle)
else:
item_lookup = {}
for item_x in items:
logging.info("Retrieving item " + item_x)
if item_x in item_lookup: continue
item_lookup[item_x] = get_item_by_name(item_x, endpoint_url)
with open('../Parts/item_lookup.pickle', 'wb') as handle:
pickle.dump(item_lookup, handle, protocol=pickle.DEFAULT_PROTOCOL)
return item_lookup
def get_item_by_name(label: str, endpoint_url: str) -> str or None:
"""Finds items on the endpoint url and returns the IDs
:param label: Item label
:param endpoint_url: Wikibase SPARQL endpoint
:return: string of Wikibase ID or None
"""
# set query
query = """
SELECT DISTINCT ?item WHERE {
VALUES ?label { \"""" + label + """\"@en }
?item rdfs:label ?label .
}"""
# get results
try:
results = wdi_core.WDItemEngine.execute_sparql_query(query, endpoint=endpoint_url)
except:
print("Query failed: ")
raise Exception("Query failed")
# parse and return results
for result in results["results"]["bindings"]:
return result["item"]["value"].split("/")[-1]
return None
def prepare(items: list, endpoint_url: str) -> list:
"""Returns a list of lists of items ID and property IDs
:param items: list of items of which IDs need to be traced
:param endpoint_url: Wikibase SPARQL endpoint
:return: list of item dictionary and of property dictionary
"""
return [get_items(items, endpoint_url), get_properties(endpoint_url)]
def get_properties(endpoint_url: str) -> dict:
"""Finds properties on the endpoint url and returns the IDs
:param endpoint_url: Wikibase SPARQL endpoint
:return: property_lookup dictionary of key property string and value property ID of Wikibase
"""
# placeholder for dictionary
property_lookup = {}
# set query
query = """SELECT ?property ?label WHERE {
?property a wikibase:Property .
?property rdfs:label ?label .
FILTER (LANG(?label) = "en" )}
"""
# get results
results = wdi_core.WDItemEngine.execute_sparql_query(query=query, endpoint=endpoint_url)
# parse results
for result in results["results"]["bindings"]:
label = result["label"]["value"].split("/")[-1]
property_lookup[label] = result["property"]["value"].split("/")[-1]
return property_lookup
def get_items(items: list, endpoint_url: str) -> dict:
"""Gets the IDs for each of the items in the item list. First tries to find it in the pickle file.
:param items: list of items of which IDs need to be traced
:param endpoint_url: Wikibase SPARQL endpoint
:return: item_lookup dictionary with item strings and value IDs
"""
# checks if there is a pickle file under name item_lookup.pickle,
# otherwise creates dictionary placeholder
if os.path.isfile("../Parts/item_lookup.pickle"):
with open('../Parts/item_lookup.pickle', 'rb') as handle:
item_lookup = pickle.load(handle)
else:
item_lookup = {}
# iterates items and gets the item ID by name
for item_x in items:
logging.info("Retrieving item " + item_x)
if item_x in item_lookup: continue
item_lookup[item_x] = get_item_by_name(item_x, endpoint_url)
# dumps object as pickle file
with open('../Parts/item_lookup.pickle', 'wb') as handle:
pickle.dump(item_lookup, handle, protocol=pickle.DEFAULT_PROTOCOL)
return item_lookup
def get_item_by_name(label: str, endpoint_url: str) -> str or None:
"""Finds items on the endpoint url and returns the IDs
:param label: Item label
:param endpoint_url: Wikibase SPARQL endpoint
:return: result string of wikibase ID or None
"""
# sets query
query = """
SELECT DISTINCT ?item WHERE {
VALUES ?label { \"""" + label + """\"@en }
?item rdfs:label ?label .
}"""
# gets results
try:
results = wdi_core.WDItemEngine.execute_sparql_query(query, endpoint=endpoint_url)
except:
print("Query failed: ")
raise Exception("Query failed")
# iterates results
for result in results["results"]["bindings"]:
return result["item"]["value"].split("/")[-1]
return None
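# Hedged usage sketch (the endpoint URL and item labels below are assumptions,
# not values taken from this project): prepare() bundles the item and property
# lookups that the WDI writer needs.
if __name__ == "__main__":
    example_endpoint = "https://example-wikibase.org/query/sparql"  # assumed endpoint
    item_lookup, property_lookup = prepare(["glucose", "methane"], example_endpoint)
    print(item_lookup)
    print(property_lookup)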
| 30.512315
| 102
| 0.66516
| 813
| 6,194
| 4.912669
| 0.186962
| 0.071607
| 0.036054
| 0.042063
| 0.800451
| 0.794692
| 0.782674
| 0.780921
| 0.780921
| 0.780921
| 0
| 0.002923
| 0.226671
| 6,194
| 202
| 103
| 30.663366
| 0.830898
| 0.314659
| 0
| 0.826087
| 0
| 0
| 0.546805
| 0.187717
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076087
| false
| 0
| 0.043478
| 0
| 0.217391
| 0.021739
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ac460ebb2a0293670e0c132534dbc9da8b9efb13
| 5,588
|
py
|
Python
|
rio_tiler/mosaic/methods/defaults.py
|
vincentsarago/rio-tiler
|
21022a0766009a64acf0038dc6adae33d9831a41
|
[
"BSD-3-Clause"
] | 77
|
2017-10-12T18:17:14.000Z
|
2019-01-17T15:39:24.000Z
|
rio_tiler/mosaic/methods/defaults.py
|
vincentsarago/rio-tiler
|
21022a0766009a64acf0038dc6adae33d9831a41
|
[
"BSD-3-Clause"
] | 40
|
2017-10-17T08:31:51.000Z
|
2019-01-11T22:00:44.000Z
|
rio_tiler/mosaic/methods/defaults.py
|
vincentsarago/rio-tiler
|
21022a0766009a64acf0038dc6adae33d9831a41
|
[
"BSD-3-Clause"
] | 23
|
2017-10-13T21:41:08.000Z
|
2019-01-09T06:08:27.000Z
|
"""rio_tiler.mosaic.methods.defaults: default mosaic filling methods."""
import numpy
from .base import MosaicMethodBase
class FirstMethod(MosaicMethodBase):
"""Feed the mosaic tile with the first pixel available."""
def __init__(self):
"""Overwrite base and init First method."""
super(FirstMethod, self).__init__()
self.exit_when_filled = True
def feed(self, tile):
"""Add data to tile."""
if self.tile is None:
self.tile = tile
pidex = self.tile.mask & ~tile.mask
mask = numpy.where(pidex, tile.mask, self.tile.mask)
self.tile = numpy.ma.where(pidex, tile, self.tile)
self.tile.mask = mask
class HighestMethod(MosaicMethodBase):
"""Feed the mosaic tile with the highest pixel values."""
def feed(self, tile):
"""Add data to tile."""
if self.tile is None:
self.tile = tile
pidex = (
numpy.bitwise_and(tile.data > self.tile.data, ~tile.mask) | self.tile.mask
)
mask = numpy.where(pidex, tile.mask, self.tile.mask)
self.tile = numpy.ma.where(pidex, tile, self.tile)
self.tile.mask = mask
class LowestMethod(MosaicMethodBase):
"""Feed the mosaic tile with the lowest pixel values."""
def feed(self, tile):
"""Add data to tile."""
if self.tile is None:
self.tile = tile
pidex = (
numpy.bitwise_and(tile.data < self.tile.data, ~tile.mask) | self.tile.mask
)
mask = numpy.where(pidex, tile.mask, self.tile.mask)
self.tile = numpy.ma.where(pidex, tile, self.tile)
self.tile.mask = mask
class MeanMethod(MosaicMethodBase):
"""Stack the tiles and return the Mean pixel value."""
def __init__(self, enforce_data_type=True):
"""Overwrite base and init Mean method."""
super(MeanMethod, self).__init__()
self.enforce_data_type = enforce_data_type
self.tile = []
@property
def data(self):
"""Return data and mask."""
if self.tile:
tile = numpy.ma.mean(numpy.ma.stack(self.tile, axis=0), axis=0)
if self.enforce_data_type:
tile = tile.astype(self.tile[0].dtype)
return tile.data, (~tile.mask[0] * 255).astype(tile.dtype)
else:
return None, None
def feed(self, tile):
"""Add data to tile."""
self.tile.append(tile)
class MedianMethod(MosaicMethodBase):
"""Stack the tiles and return the Median pixel value."""
def __init__(self, enforce_data_type=True):
"""Overwrite base and init Median method."""
super(MedianMethod, self).__init__()
self.enforce_data_type = enforce_data_type
self.tile = []
@property
def data(self):
"""Return data and mask."""
if self.tile:
tile = numpy.ma.median(numpy.ma.stack(self.tile, axis=0), axis=0)
if self.enforce_data_type:
tile = tile.astype(self.tile[0].dtype)
return tile.data, (~tile.mask[0] * 255).astype(tile.dtype)
else:
return None, None
def feed(self, tile):
"""Create a stack of tile."""
self.tile.append(tile)
class StdevMethod(MosaicMethodBase):
"""Stack the tiles and return the Standard Deviation value."""
def __init__(self, enforce_data_type=True):
"""Overwrite base and init Stdev method."""
super(StdevMethod, self).__init__()
self.tile = []
@property
def data(self):
"""Return data and mask."""
if self.tile:
tile = numpy.ma.std(numpy.ma.stack(self.tile, axis=0), axis=0)
return tile.data, (~tile.mask[0] * 255).astype(tile.dtype)
else:
return None, None
def feed(self, tile):
"""Add data to tile."""
self.tile.append(tile)
class LastBandHigh(MosaicMethodBase):
"""Feed the mosaic tile using the last band as decision factor."""
@property
def data(self):
"""Return data and mask."""
if self.tile is not None:
return (
self.tile.data[:-1],
(~self.tile.mask[0] * 255).astype(self.tile.dtype),
)
else:
return None, None
def feed(self, tile: numpy.ma.MaskedArray):
"""Add data to tile."""
if self.tile is None:
self.tile = tile
return
pidex = (
numpy.bitwise_and(tile.data[-1] > self.tile.data[-1], ~tile.mask)
| self.tile.mask
)
mask = numpy.where(pidex, tile.mask, self.tile.mask)
self.tile = numpy.ma.where(pidex, tile, self.tile)
self.tile.mask = mask
class LastBandLow(MosaicMethodBase):
"""Feed the mosaic tile using the last band as decision factor."""
@property
def data(self):
"""Return data and mask."""
if self.tile is not None:
return (
self.tile.data[:-1],
(~self.tile.mask[0] * 255).astype(self.tile.dtype),
)
else:
return None, None
def feed(self, tile: numpy.ma.MaskedArray):
"""Add data to tile."""
if self.tile is None:
self.tile = tile
return
pidex = (
numpy.bitwise_and(tile.data[-1] < self.tile.data[-1], ~tile.mask)
| self.tile.mask
)
mask = numpy.where(pidex, tile.mask, self.tile.mask)
self.tile = numpy.ma.where(pidex, tile, self.tile)
self.tile.mask = mask
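# Hedged illustration (helper added for this document, not part of rio-tiler's
# API): how FirstMethod accumulates pixels across feeds. Only the `tile`
# attribute maintained by feed() is inspected here, because the `data`
# property inherited from MosaicMethodBase lives in .base and is not shown
# above.
def _first_method_demo():
    # one-band 2x2 tile with the top-left pixel masked out
    first = numpy.ma.MaskedArray(
        data=numpy.full((1, 2, 2), 10, dtype="uint8"),
        mask=numpy.array([[[True, False], [False, False]]]),
    )
    # fully valid tile fed second
    second = numpy.ma.MaskedArray(
        data=numpy.full((1, 2, 2), 20, dtype="uint8"),
        mask=numpy.zeros((1, 2, 2), dtype=bool),
    )
    method = FirstMethod()
    method.feed(first)
    method.feed(second)  # fills only the pixel that `first` left masked
    return method.tile   # 20 where `first` was masked, 10 elsewhere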
| 29.256545
| 86
| 0.573193
| 707
| 5,588
| 4.449788
| 0.120226
| 0.175461
| 0.064844
| 0.071202
| 0.850922
| 0.850922
| 0.842339
| 0.765099
| 0.765099
| 0.755563
| 0
| 0.008678
| 0.298855
| 5,588
| 190
| 87
| 29.410526
| 0.794283
| 0.163386
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.141667
| false
| 0
| 0.016667
| 0
| 0.325
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ac6fc1b210632046a04f35464d2d89383a795143
| 876
|
py
|
Python
|
N-Gram/PlotUtils.py
|
FindTheTruth/Natural-Language-Processing
|
a52c777e505dd5ccd9f892fbf98ba50d4c29b31b
|
[
"Apache-2.0"
] | 1
|
2022-03-23T09:26:59.000Z
|
2022-03-23T09:26:59.000Z
|
N-Gram/PlotUtils.py
|
FindTheTruth/Natural-Language-Processing
|
a52c777e505dd5ccd9f892fbf98ba50d4c29b31b
|
[
"Apache-2.0"
] | null | null | null |
N-Gram/PlotUtils.py
|
FindTheTruth/Natural-Language-Processing
|
a52c777e505dd5ccd9f892fbf98ba50d4c29b31b
|
[
"Apache-2.0"
] | null | null | null |
import matplotlib.pyplot as plt
x = ["N=1", "N=2", "N=3", "N=4", "N=5","N=6"]
y = [0.9365, 0.9865, 0.9895, 0.9950,0.9880,0.9615]
rects = plt.barh(x, y, color=["red", "blue", "purple", "violet", "green", "black"])
for rect in rects:  # rects is the collection of bar objects
width = rect.get_width()
print(width)
plt.text(width, rect.get_y() + rect.get_height() / 2, str(width), size=10)
plt.xlim(0.0,1.3)
# plt.legend()
plt.show()
x = ["k=1e-5","k=1e-4", "k=1e-3", "k=1e-2", "k=1e-1", "k=1.0"]
y = [0.9895, 0.9900, 0.9950, 0.9885,0.9740,0.831]
# y = [0.9365, 0.9865, 0.9895, 0.9950,0.9880,0.9615]
rects = plt.barh(x, y, color=["red", "blue", "purple", "violet", "green", "black"])
for rect in rects:  # rects is the collection of bar objects
width = rect.get_width()
print(width)
plt.text(width, rect.get_y() + rect.get_height() / 2, str(width), size=10)
plt.xlim(0.0,1.3)
# plt.legend()
plt.show()
| 36.5
| 83
| 0.589041
| 172
| 876
| 2.965116
| 0.296512
| 0.082353
| 0.094118
| 0.027451
| 0.815686
| 0.815686
| 0.815686
| 0.815686
| 0.815686
| 0.815686
| 0
| 0.162198
| 0.148402
| 876
| 24
| 84
| 36.5
| 0.521448
| 0.121005
| 0
| 0.736842
| 0
| 0
| 0.145098
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.052632
| 0
| 0.052632
| 0.105263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3bd6298a19903f15f7e907c194b8869777800558
| 21,160
|
py
|
Python
|
model_blocks/tests.py
|
aptivate/django-model-blocks
|
5057ed57887683d777f04c95d67d268d21a18c02
|
[
"BSD-3-Clause"
] | 6
|
2015-01-20T08:43:44.000Z
|
2020-08-13T01:57:10.000Z
|
model_blocks/tests.py
|
techdragon/django-model-blocks
|
8175d7353d792cb720b4ac356f4538888bf7747c
|
[
"BSD-3-Clause"
] | 1
|
2016-10-16T17:35:07.000Z
|
2016-10-16T17:35:07.000Z
|
model_blocks/tests.py
|
techdragon/django-model-blocks
|
8175d7353d792cb720b4ac356f4538888bf7747c
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Test the model blocks
"""
import datetime
from django.test import TestCase
from mock import Mock
from django.db.models import Model, IntegerField, DateTimeField, CharField
from django.template import Context, Template, TemplateSyntaxError
from example_project.pepulator_factory.models import Pepulator, Distributor
from model_blocks.templatetags import model_filters
from model_blocks.templatetags import model_nodes
class DetailBlockFilterTest (TestCase):
fixtures = ['pepulator_factory_data.json']
def setUp(self):
# Mock Django's get_template so that it doesn't load a real file;
# instead just return a template that allows us to verify the context
model_nodes.get_template = Mock(
return_value=Template(('{{ title|default_if_none:instance|safe }}:{{ model|safe }},'
'{% for name, label, value, is_list, is_link in fields %}'
'{{ name|safe }},'
'{{ label|safe }},'
'{% if not is_list %}'
'{% if is_link %}'
'@{{ value }}'
'{% else %}'
'{{ value|safe }}'
'{% endif %}'
'{% else %}'
'[{% for item in value.all %}{{ item|safe }},{% endfor %}]'
'{% endif %},'
'{% endfor %}')))
def test_model_format(self):
"""Tests that a given model is formatted as expected."""
pepulator = Pepulator.objects.get(serial_number=1235)
expected_detail = (u"Pepulator #1235:pepulator,"
"serial_number,serial number,1235,"
"height,height,12,"
"width,width,15,"
"manufacture_date,manufacture date,2011-06-10 11:12:33,"
"color,color,red,"
"address,address,@ppr://1235/,"
"distributed_by,distributed by,Walmart,"
"knuckles,knuckles,[Knuckle of hardness 2.35,Knuckle of hardness 1.10,],"
"jambs,jambs,[],"
)
detail = model_filters.as_detail_block(pepulator)
model_nodes.get_template.assert_called_with('model_blocks/object_detail.html')
self.assertEqual(detail, expected_detail)
def test_filter_is_registered(self):
"""Test that the filter can be used from within a template"""
template = Template(('{% load model_filters %}'
'{{ pepulator|as_detail_block }}'))
pepulator = Pepulator.objects.get(serial_number=1235)
context = Context({'pepulator':pepulator})
expected_detail = (u"Pepulator #1235:pepulator,"
"serial_number,serial number,1235,"
"height,height,12,"
"width,width,15,"
"manufacture_date,manufacture date,2011-06-10 11:12:33,"
"color,color,red,"
"address,address,@ppr://1235/,"
"distributed_by,distributed by,Walmart,"
"knuckles,knuckles,[Knuckle of hardness 2.35,Knuckle of hardness 1.10,],"
"jambs,jambs,[],"
)
detail = template.render(context)
model_nodes.get_template.assert_called_with('model_blocks/object_detail.html')
self.assertEqual(detail, expected_detail)
def test_title_is_used(self):
"""Test that a title is used if provided"""
template = Template(('{% load model_filters %}'
'{{ pepulator|as_detail_block:"My Pepulator" }}'))
pepulator = Pepulator.objects.get(serial_number=1235)
context = Context({'pepulator':pepulator})
expected_detail = (u"My Pepulator:pepulator,"
"serial_number,serial number,1235,"
"height,height,12,"
"width,width,15,"
"manufacture_date,manufacture date,2011-06-10 11:12:33,"
"color,color,red,"
"address,address,@ppr://1235/,"
"distributed_by,distributed by,Walmart,"
"knuckles,knuckles,[Knuckle of hardness 2.35,Knuckle of hardness 1.10,],"
"jambs,jambs,[],"
)
detail = template.render(context)
model_nodes.get_template.assert_called_with('model_blocks/object_detail.html')
self.assertEqual(detail, expected_detail)
def test_related_fields(self):
"""Tests that related fields not defined on the model are included."""
pepulator = Distributor.objects.get(name="Mom & Pop")
expected_detail = (u"Mom & Pop:distributor,"
"name,name,Mom & Pop,"
"capacity,capacity,175,"
"stock,stock,[Pepulator #1238,],"
)
detail = model_filters.as_detail_block(pepulator)
model_nodes.get_template.assert_called_with('model_blocks/object_detail.html')
self.assertEqual(detail, expected_detail)
class TeaserBlockFilterTest (TestCase):
fixtures = ['pepulator_factory_data.json']
def setUp(self):
# Mock Django's get_template so that it doesn't load a real file;
# instead just return a template that allows us to verify the context
model_nodes.get_template = Mock(
return_value=Template('{{ title|default_if_none:instance|safe }}:{{ model|safe }},{% for name, label, value, is_list in fields %}{{ name|safe }},{{ label|safe }},{% if not is_list %}{{ value|safe }}{% else %}[{% for item in value.all %}{{ item|safe }},{% endfor %}]{% endif %},{% endfor %}'))
def test_model_format(self):
"""Tests that a given model is formatted as expected."""
pepulator = Pepulator.objects.get(serial_number=1235)
expected_teaser = (u"Pepulator #1235:pepulator,"
"serial_number,serial number,1235,"
"height,height,12,"
"width,width,15,"
"manufacture_date,manufacture date,2011-06-10 11:12:33,"
"color,color,red,"
"address,address,ppr://1235/,"
"distributed_by,distributed by,Walmart,"
"knuckles,knuckles,[Knuckle of hardness 2.35,Knuckle of hardness 1.10,],"
"jambs,jambs,[],"
)
teaser = model_filters.as_teaser_block(pepulator)
model_nodes.get_template.assert_called_with('model_blocks/object_teaser.html')
self.assertEqual(teaser, expected_teaser)
class ListBlockFilterTest (TestCase):
fixtures = ['pepulator_factory_data.json']
def setUp(self):
# Mock Django's get_template so that it doesn't load a real file;
# instead just return a template that allows us to verify the context
model_nodes.get_template = Mock(
return_value=Template('{{ title|default_if_none:model|capfirst }}{% if not title %}s{% endif %}:{{ instance_list|safe }}'))
def test_list_format(self):
"""Tests that a given model is formatted as expected."""
pepulator_list = Pepulator.objects.filter(serial_number__gt=2000)
expected_rendering = (u"Pepulators:[<Pepulator: Pepulator #2345>, "
"<Pepulator: Pepulator #2346>]")
rendering = model_filters.as_list_block(pepulator_list)
model_nodes.get_template.assert_called_with('model_blocks/object_list.html')
self.assertEqual(rendering, expected_rendering)
def test_filter_is_registered(self):
"""Test that the filter can be used from within a template"""
template = Template(('{% load model_filters %}'
'{{ pepulators|as_list_block }}'))
pepulator_list = Pepulator.objects.filter(serial_number__gt=2000)
context = Context({'pepulators':pepulator_list})
expected_rendering = (u"Pepulators:[<Pepulator: Pepulator #2345>, "
"<Pepulator: Pepulator #2346>]")
rendering = template.render(context)
model_nodes.get_template.assert_called_with('model_blocks/object_list.html')
self.assertEqual(rendering, expected_rendering)
def test_empty_queryset(self):
"""Test that the filter can be used from within a template"""
template = Template(('{% load model_filters %}'
'{{ pepulators|as_list_block }}'))
pepulator_list = Pepulator.objects.filter(serial_number__gt=5000)
context = Context({'pepulators':pepulator_list})
expected_rendering = (u"Pepulators:[]")
rendering = template.render(context)
model_nodes.get_template.assert_called_with('model_blocks/object_list.html')
self.assertEqual(rendering, expected_rendering)
def test_non_query_set_results_in_no_model(self):
"""Test that when a non queryset is used, the model is None"""
# Why? Because we try to read the model off of the queryset. If we just
# have a list of objects, then we don't know the model.
template = Template(('{% load model_filters %}'
'{{ pepulators|as_list_block }}'))
pepulator_list = [p for p in Pepulator.objects.filter(serial_number__gt=2000)]
context = Context({'pepulators':pepulator_list})
expected_rendering = (u"Nones:[<Pepulator: Pepulator #2345>, "
"<Pepulator: Pepulator #2346>]")
rendering = template.render(context)
model_nodes.get_template.assert_called_with('model_blocks/object_list.html')
self.assertEqual(rendering, expected_rendering)
def test_empty_list(self):
"""Test that when a non queryset is used, the model is None"""
template = Template(('{% load model_filters %}'
'{{ pepulators|as_list_block }}'))
pepulator_list = []
context = Context({'pepulators':pepulator_list})
expected_rendering = (u"Nones:[]")
rendering = template.render(context)
model_nodes.get_template.assert_called_with('model_blocks/object_list.html')
self.assertEqual(rendering, expected_rendering)
def test_alternate_title_is_used(self):
"""Test that a list title is used if provided"""
template = Template(('{% load model_filters %}'
'{{ pepulators|as_list_block:"Some Pepulators" }}'))
pepulator_list = Pepulator.objects.filter(serial_number__gt=2000)
context = Context({'pepulators':pepulator_list})
expected_rendering = (u"Some Pepulators:[<Pepulator: Pepulator #2345>, "
"<Pepulator: Pepulator #2346>]")
rendering = template.render(context)
model_nodes.get_template.assert_called_with('model_blocks/object_list.html')
self.assertEqual(rendering, expected_rendering)
class DetailBlockTagTest (TestCase):
fixtures = ['pepulator_factory_data.json']
def setUp(self):
# Mock Django's get_template so that it doesn't load a real file;
# instead just return a template that allows us to verify the context
model_nodes.get_template = Mock(
return_value=Template('{{ title|default_if_none:instance|safe }}:{{ model|safe }},{% for name, label, value, is_list in fields %}{{ name|safe }},{{ label|safe }},{% if not is_list %}{{ value|safe }}{% else %}[{% for item in value.all %}{{ item|safe }},{% endfor %}]{% endif %},{% endfor %}'))
def test_tag_is_registered(self):
"""Test that the filter can be used from within a template"""
template = Template(('{% load model_tags %}'
'{% with pepulator_factory_pepulator_detail_template="pepulator_factory/pepulator_detail.html" %}'
'{% detail_block pepulator %}'
'{% endwith %}'))
pepulator = Pepulator.objects.get(serial_number=1235)
context = Context({'pepulator':pepulator})
expected_detail = (u"Pepulator #1235:pepulator,"
"serial_number,serial number,1235,"
"height,height,12,"
"width,width,15,"
"manufacture_date,manufacture date,2011-06-10 11:12:33,"
"color,color,red,"
"address,address,ppr://1235/,"
"distributed_by,distributed by,Walmart,"
"knuckles,knuckles,[Knuckle of hardness 2.35,Knuckle of hardness 1.10,],"
"jambs,jambs,[],"
)
detail = template.render(context)
model_nodes.get_template.assert_called_with('pepulator_factory/pepulator_detail.html')
self.assertEqual(detail, expected_detail)
def test_with_specific_fields(self):
"""Test that the included fields spec is respected"""
template = Template(('{% load model_tags %}'
'{% with pepulator_factory_pepulator_detail_template="pepulator_factory/pepulator_detail.html" %}'
'{% with pepulator_factory_pepulator_fields="serial_number, color, height, width" %}'
'{% detail_block pepulator %}'
'{% endwith %}'
'{% endwith %}'))
pepulator = Pepulator.objects.get(serial_number=1235)
context = Context({'pepulator':pepulator})
expected_detail = (u"Pepulator #1235:pepulator,"
"serial_number,serial number,1235,"
"color,color,red,"
"height,height,12,"
"width,width,15,"
)
detail = template.render(context)
self.assertEqual(detail, expected_detail)
def test_with_excluded_fields(self):
"""Test that the excluded fields spec is respected"""
template = Template(('{% load model_tags %}'
'{% with pepulator_factory_pepulator_detail_template="pepulator_factory/pepulator_detail.html" %}'
'{% with pepulator_factory_pepulator_exclude="knuckles, jambs, color, address" %}'
'{% detail_block pepulator %}'
'{% endwith %}'
'{% endwith %}'))
pepulator = Pepulator.objects.get(serial_number=1235)
context = Context({'pepulator':pepulator})
expected_detail = (u"Pepulator #1235:pepulator,"
"serial_number,serial number,1235,"
"height,height,12,"
"width,width,15,"
"manufacture_date,manufacture date,2011-06-10 11:12:33,"
"distributed_by,distributed by,Walmart,"
)
detail = template.render(context)
self.assertEqual(detail, expected_detail)
def test_fail_on_wrong_number_of_arguments(self):
self.assertRaises(TemplateSyntaxError, Template,
('{% load model_tags %}'
'{% detail_block pepulator "overflow" %}'))
self.assertRaises(TemplateSyntaxError, Template,
('{% load model_tags %}'
'{% detail_block %}'))
class TeaserBlockTagTest (TestCase):
fixtures = ['pepulator_factory_data.json']
def setUp(self):
# Mock Django's get_template so that it doesn't load a real file;
# instead just return a template that allows us to verify the context
model_nodes.get_template = Mock(
return_value=Template('{{ title|default_if_none:instance|safe }}:{{ model|safe }},{% for name, label, value, is_list in fields %}{{ name|safe }},{{ label|safe }},{% if not is_list %}{{ value|safe }}{% else %}[{% for item in value.all %}{{ item|safe }},{% endfor %}]{% endif %},{% endfor %}'))
def test_tag_is_registered(self):
"""Test that the filter can be used from within a template"""
template = Template(('{% load model_tags %}'
'{% with pepulator_factory_pepulator_teaser_template="pepulator_factory/pepulator_teaser.html" %}'
'{% teaser_block pepulator %}'
'{% endwith %}'))
pepulator = Pepulator.objects.get(serial_number=1235)
context = Context({'pepulator':pepulator})
expected_teaser = (u"Pepulator #1235:pepulator,"
"serial_number,serial number,1235,"
"height,height,12,"
"width,width,15,"
"manufacture_date,manufacture date,2011-06-10 11:12:33,"
"color,color,red,"
"address,address,ppr://1235/,"
"distributed_by,distributed by,Walmart,"
"knuckles,knuckles,[Knuckle of hardness 2.35,Knuckle of hardness 1.10,],"
"jambs,jambs,[],"
)
teaser = template.render(context)
model_nodes.get_template.assert_called_with('pepulator_factory/pepulator_teaser.html')
self.assertEqual(teaser, expected_teaser)
def test_fail_on_wrong_number_of_arguments(self):
self.assertRaises(TemplateSyntaxError, Template,
('{% load model_tags %}'
'{% teaser_block pepulator "overflow" %}'))
self.assertRaises(TemplateSyntaxError, Template,
('{% load model_tags %}'
'{% teaser_block %}'))
class ListBlockTagTest (TestCase):
fixtures = ['pepulator_factory_data.json']
def setUp(self):
# Mock Django's get_template so that it doesn't load a real file;
# instead just return a template that allows us to verify the context
model_nodes.get_template = Mock(
return_value=Template('{{ title|default_if_none:model|capfirst }}{% if not title %}s{% endif %}:{{ instance_list|safe }}'))
def test_filter_is_registered(self):
"""Test that the filter can be used from within a template"""
template = Template(('{% load model_tags %}'
'{% with pepulator_factory_pepulator_list_template="pepulator_factory/pepulator_list.html" %}'
'{% list_block pepulators %}'
'{% endwith %}'))
pepulator_list = Pepulator.objects.filter(serial_number__gt=2000)
context = Context({'pepulators':pepulator_list})
expected_rendering = (u"Pepulators:[<Pepulator: Pepulator #2345>, "
"<Pepulator: Pepulator #2346>]")
rendering = template.render(context)
model_nodes.get_template.assert_called_with('pepulator_factory/pepulator_list.html')
self.assertEqual(rendering, expected_rendering)
def test_fail_on_wrong_number_of_arguments(self):
self.assertRaises(TemplateSyntaxError, Template,
('{% load model_tags %}'
'{% list_block pepulators "overflow" %}'))
self.assertRaises(TemplateSyntaxError, Template,
('{% load model_tags %}'
'{% list_block %}'))
class ModelBlockModuleTest (TestCase):
def test_all_tags_and_filters_loaded(self):
template = Template(('{% load model_blocks %}'
'{% detail_block pepulator %}'
'{% list_block pepulators %}'
'{{ pepulator|as_detail_block }}'
'{{ pepulators|as_list_block }}'))
# We just care that everything loaded, and we were able to get here
# without incident.
self.assertTrue(True)
class SideEffectsTest (TestCase):
fixtures = ['pepulator_factory_data.json']
def setUp(self):
# Mock Django's get_template so that it doesn't load a real file;
# instead just return a template that allows us to verify the context
model_nodes.get_template = Mock(
return_value=Template('{{ title|default_if_none:model|capfirst }}{% if not title %}s{% endif %}'))
def test_model_doesnt_carry_over_into_future_blocks(self):
template = Template(('{% load model_tags %}'
'{{ model }}'
'{% list_block distributors %}'
'{{ model }}'))
distributor_list = Distributor.objects.all()
context = Context({'model':'My String',
'distributors':distributor_list})
expected_rendering = (u"My String"
"Distributors"
"My String")
rendering = template.render(context)
self.assertEqual(rendering, expected_rendering)
| 44.453782
| 304
| 0.578544
| 2,168
| 21,160
| 5.446033
| 0.095018
| 0.031507
| 0.023122
| 0.037351
| 0.870077
| 0.844584
| 0.835945
| 0.824257
| 0.822902
| 0.784958
| 0
| 0.024127
| 0.308554
| 21,160
| 475
| 305
| 44.547368
| 0.782858
| 0.094518
| 0
| 0.704969
| 0
| 0.015528
| 0.353259
| 0.129527
| 0
| 0
| 0
| 0
| 0.118012
| 1
| 0.086957
| false
| 0
| 0.024845
| 0
| 0.158385
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce34ebaf15612703873e6a27020070246ab042d8
| 7,197
|
py
|
Python
|
test-framework/test-suites/integration/tests/add/test_add_host_bonded.py
|
knutsonchris/stacki
|
33087dd5fa311984a66ccecfeee6f9c2c25f665d
|
[
"BSD-3-Clause"
] | 123
|
2015-05-12T23:36:45.000Z
|
2017-07-05T23:26:57.000Z
|
test-framework/test-suites/integration/tests/add/test_add_host_bonded.py
|
knutsonchris/stacki
|
33087dd5fa311984a66ccecfeee6f9c2c25f665d
|
[
"BSD-3-Clause"
] | 177
|
2015-06-05T19:17:47.000Z
|
2017-07-07T17:57:24.000Z
|
test-framework/test-suites/integration/tests/add/test_add_host_bonded.py
|
knutsonchris/stacki
|
33087dd5fa311984a66ccecfeee6f9c2c25f665d
|
[
"BSD-3-Clause"
] | 32
|
2015-06-07T02:25:03.000Z
|
2017-06-23T07:35:35.000Z
|
import json
from textwrap import dedent
import pytest
@pytest.mark.usefixtures("add_host_with_interface")
class TestAddHostBonded:
def test_no_hosts(self, host):
result = host.run('stack add host bonded')
assert result.rc == 255
assert result.stderr == dedent('''\
error - "host" argument is required
{host} [channel=string] [interfaces=string] [ip=string] [name=string] [network=string] [options=string]
''')
def test_no_matching_hosts(self, host):
result = host.run('stack add host bonded a:test')
assert result.rc == 255
assert result.stderr == dedent('''\
error - "host" argument is required
{host} [channel=string] [interfaces=string] [ip=string] [name=string] [network=string] [options=string]
''')
def test_multiple_hosts(self, host):
result = host.run('stack add host bonded frontend-0-0 backend-0-0')
assert result.rc == 255
assert result.stderr == dedent('''\
error - "host" argument must be unique
{host} [channel=string] [interfaces=string] [ip=string] [name=string] [network=string] [options=string]
''')
def test_no_channel(self, host):
result = host.run('stack add host bonded backend-0-0')
assert result.rc == 255
assert result.stderr == dedent('''\
error - "channel" parameter is required
{host} [channel=string] [interfaces=string] [ip=string] [name=string] [network=string] [options=string]
''')
def test_no_interfaces(self, host):
result = host.run('stack add host bonded backend-0-0 channel=bond0')
assert result.rc == 255
assert result.stderr == dedent('''\
error - "interfaces" parameter is required
{host} [channel=string] [interfaces=string] [ip=string] [name=string] [network=string] [options=string]
''')
def test_no_ip(self, host):
result = host.run('stack add host bonded backend-0-0 channel=bond0 '
'interfaces=eth0,eth1')
assert result.rc == 255
assert result.stderr == dedent('''\
error - "ip" parameter is required
{host} [channel=string] [interfaces=string] [ip=string] [name=string] [network=string] [options=string]
''')
def test_no_network(self, host):
result = host.run('stack add host bonded backend-0-0 channel=bond0 '
'interfaces=eth0,eth1 ip=192.168.0.1')
assert result.rc == 255
assert result.stderr == dedent('''\
error - "network" parameter is required
{host} [channel=string] [interfaces=string] [ip=string] [name=string] [network=string] [options=string]
''')
def test_invalid_network(self, host):
# Add a second interface to our backend
result = host.run('stack add host interface backend-0-0 interface=eth1')
assert result.rc == 0
# Add the bonded interface
result = host.run('stack add host bonded backend-0-0 channel=bond0 '
'interfaces=eth0,eth1 ip=192.168.0.1 network=test')
assert result.rc == 255
assert result.stderr == 'error - network "test" does not exist\n'
def test_missing_interface(self, host):
result = host.run('stack add host bonded backend-0-0 channel=bond0 '
'interfaces=eth0,eth1 ip=192.168.0.1 network=private')
assert result.rc == 255
assert result.stderr == 'error - interface "eth1" does not exist for host "backend-0-0"\n'
def test_comma_separated_interfaces(self, host):
# Add a second interface to our backend
result = host.run('stack add host interface backend-0-0 interface=eth1')
assert result.rc == 0
# Add the bonded interface
result = host.run('stack add host bonded backend-0-0 channel=bond0 '
'interfaces=eth0,eth1 ip=192.168.0.1 network=private')
assert result.rc == 0
# Check the interface is in the database now
result = host.run('stack list host interface backend-0-0 output-format=json')
assert result.rc == 0
assert json.loads(result.stdout) == [
{
'channel': None,
'default': None,
'host': 'backend-0-0',
'interface': 'bond0',
'ip': '192.168.0.1',
'mac': None,
'module': 'bonding',
'name': 'backend-0-0',
'network': 'private',
'options': None,
'vlan': None
},
{
'channel': 'bond0',
'default': None,
'host': 'backend-0-0',
'interface': 'eth0',
'ip': None,
'mac': None,
'module': None,
'name': 'backend-0-0',
'network': None,
'options': None,
'vlan': None
},
{
'channel': 'bond0',
'default': None,
'host': 'backend-0-0',
'interface': 'eth1',
'ip': None,
'mac': None,
'module': None,
'name': 'backend-0-0',
'network': None,
'options': None,
'vlan': None
}
]
def test_space_separated_interfaces(self, host):
# Add a second interface to our backend
result = host.run('stack add host interface backend-0-0 interface=eth1')
assert result.rc == 0
# Add the bonded interface
result = host.run('stack add host bonded backend-0-0 channel=bond0 '
'interfaces="eth0 eth1" ip=192.168.0.1 network=private')
assert result.rc == 0
# Check the interface is in the database now
result = host.run('stack list host interface backend-0-0 output-format=json')
assert result.rc == 0
assert json.loads(result.stdout) == [
{
'channel': None,
'default': None,
'host': 'backend-0-0',
'interface': 'bond0',
'ip': '192.168.0.1',
'mac': None,
'module': 'bonding',
'name': 'backend-0-0',
'network': 'private',
'options': None,
'vlan': None
},
{
'channel': 'bond0',
'default': None,
'host': 'backend-0-0',
'interface': 'eth0',
'ip': None,
'mac': None,
'module': None,
'name': 'backend-0-0',
'network': None,
'options': None,
'vlan': None
},
{
'channel': 'bond0',
'default': None,
'host': 'backend-0-0',
'interface': 'eth1',
'ip': None,
'mac': None,
'module': None,
'name': 'backend-0-0',
'network': None,
'options': None,
'vlan': None
}
]
def test_default_with_options(self, host):
# Add a second interface to our backend
result = host.run('stack add host interface backend-0-0 interface=eth1 default=true')
assert result.rc == 0
# Add the bonded interface
result = host.run('stack add host bonded backend-0-0 channel=bond0 '
'interfaces=eth0,eth1 ip=192.168.0.1 network=private options=test_options')
assert result.rc == 0
# Check the interface is in the database now
result = host.run('stack list host interface backend-0-0 output-format=json')
assert result.rc == 0
assert json.loads(result.stdout) == [
{
'channel': None,
'default': True,
'host': 'backend-0-0',
'interface': 'bond0',
'ip': '192.168.0.1',
'mac': None,
'module': 'bonding',
'name': 'backend-0-0',
'network': 'private',
'options': 'bonding-opts="test_options"',
'vlan': None
},
{
'channel': 'bond0',
'default': None,
'host': 'backend-0-0',
'interface': 'eth0',
'ip': None,
'mac': None,
'module': None,
'name': 'backend-0-0',
'network': None,
'options': None,
'vlan': None
},
{
'channel': 'bond0',
'default': None,
'host': 'backend-0-0',
'interface': 'eth1',
'ip': None,
'mac': None,
'module': None,
'name': 'backend-0-0',
'network': None,
'options': None,
'vlan': None
}
]
| 28.559524
| 106
| 0.632208
| 966
| 7,197
| 4.675983
| 0.089027
| 0.016383
| 0.071729
| 0.075714
| 0.911003
| 0.911003
| 0.911003
| 0.911003
| 0.89462
| 0.864069
| 0
| 0.039199
| 0.202445
| 7,197
| 251
| 107
| 28.673307
| 0.747735
| 0.0528
| 0
| 0.730233
| 0
| 0.032558
| 0.489052
| 0.007348
| 0
| 0
| 0
| 0
| 0.144186
| 1
| 0.055814
| false
| 0
| 0.013953
| 0
| 0.074419
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
020e09341ffea9ce59519650e80614b26a974b81
| 6,610
|
py
|
Python
|
tests/mixins.py
|
jarkkorantala/sqlalchemy-utils
|
7cee65f0a3074245b853425e19a732aa274bfa3e
|
[
"BSD-3-Clause"
] | 879
|
2015-01-01T12:06:35.000Z
|
2022-03-27T16:13:05.000Z
|
tests/mixins.py
|
jarkkorantala/sqlalchemy-utils
|
7cee65f0a3074245b853425e19a732aa274bfa3e
|
[
"BSD-3-Clause"
] | 418
|
2015-01-02T08:43:43.000Z
|
2022-03-25T15:49:21.000Z
|
tests/mixins.py
|
jarkkorantala/sqlalchemy-utils
|
7cee65f0a3074245b853425e19a732aa274bfa3e
|
[
"BSD-3-Clause"
] | 295
|
2015-01-06T14:19:33.000Z
|
2022-03-26T16:20:50.000Z
|
import pytest
import sqlalchemy as sa
class ThreeLevelDeepOneToOne(object):
@pytest.fixture
def Catalog(self, Base, Category):
class Catalog(Base):
__tablename__ = 'catalog'
id = sa.Column('_id', sa.Integer, primary_key=True)
category = sa.orm.relationship(
Category,
uselist=False,
backref='catalog'
)
return Catalog
@pytest.fixture
def Category(self, Base, SubCategory):
class Category(Base):
__tablename__ = 'category'
id = sa.Column('_id', sa.Integer, primary_key=True)
catalog_id = sa.Column(
'_catalog_id',
sa.Integer,
sa.ForeignKey('catalog._id')
)
sub_category = sa.orm.relationship(
SubCategory,
uselist=False,
backref='category'
)
return Category
@pytest.fixture
def SubCategory(self, Base, Product):
class SubCategory(Base):
__tablename__ = 'sub_category'
id = sa.Column('_id', sa.Integer, primary_key=True)
category_id = sa.Column(
'_category_id',
sa.Integer,
sa.ForeignKey('category._id')
)
product = sa.orm.relationship(
Product,
uselist=False,
backref='sub_category'
)
return SubCategory
@pytest.fixture
def Product(self, Base):
class Product(Base):
__tablename__ = 'product'
id = sa.Column('_id', sa.Integer, primary_key=True)
price = sa.Column(sa.Integer)
sub_category_id = sa.Column(
'_sub_category_id',
sa.Integer,
sa.ForeignKey('sub_category._id')
)
return Product
@pytest.fixture
def init_models(self, Catalog, Category, SubCategory, Product):
pass
class ThreeLevelDeepOneToMany(object):
@pytest.fixture
def Catalog(self, Base, Category):
class Catalog(Base):
__tablename__ = 'catalog'
id = sa.Column('_id', sa.Integer, primary_key=True)
categories = sa.orm.relationship(Category, backref='catalog')
return Catalog
@pytest.fixture
def Category(self, Base, SubCategory):
class Category(Base):
__tablename__ = 'category'
id = sa.Column('_id', sa.Integer, primary_key=True)
catalog_id = sa.Column(
'_catalog_id',
sa.Integer,
sa.ForeignKey('catalog._id')
)
sub_categories = sa.orm.relationship(
SubCategory, backref='category'
)
return Category
@pytest.fixture
def SubCategory(self, Base, Product):
class SubCategory(Base):
__tablename__ = 'sub_category'
id = sa.Column('_id', sa.Integer, primary_key=True)
category_id = sa.Column(
'_category_id',
sa.Integer,
sa.ForeignKey('category._id')
)
products = sa.orm.relationship(
Product,
backref='sub_category'
)
return SubCategory
@pytest.fixture
def Product(self, Base):
class Product(Base):
__tablename__ = 'product'
id = sa.Column('_id', sa.Integer, primary_key=True)
price = sa.Column(sa.Numeric)
sub_category_id = sa.Column(
'_sub_category_id',
sa.Integer,
sa.ForeignKey('sub_category._id')
)
def __repr__(self):
return '<Product id=%r>' % self.id
return Product
@pytest.fixture
def init_models(self, Catalog, Category, SubCategory, Product):
pass
class ThreeLevelDeepManyToMany(object):
@pytest.fixture
def Catalog(self, Base, Category):
catalog_category = sa.Table(
'catalog_category',
Base.metadata,
sa.Column('catalog_id', sa.Integer, sa.ForeignKey('catalog._id')),
sa.Column('category_id', sa.Integer, sa.ForeignKey('category._id'))
)
class Catalog(Base):
__tablename__ = 'catalog'
id = sa.Column('_id', sa.Integer, primary_key=True)
categories = sa.orm.relationship(
Category,
backref='catalogs',
secondary=catalog_category
)
return Catalog
@pytest.fixture
def Category(self, Base, SubCategory):
category_subcategory = sa.Table(
'category_subcategory',
Base.metadata,
sa.Column(
'category_id',
sa.Integer,
sa.ForeignKey('category._id')
),
sa.Column(
'subcategory_id',
sa.Integer,
sa.ForeignKey('sub_category._id')
)
)
class Category(Base):
__tablename__ = 'category'
id = sa.Column('_id', sa.Integer, primary_key=True)
sub_categories = sa.orm.relationship(
SubCategory,
backref='categories',
secondary=category_subcategory
)
return Category
@pytest.fixture
def SubCategory(self, Base, Product):
subcategory_product = sa.Table(
'subcategory_product',
Base.metadata,
sa.Column(
'subcategory_id',
sa.Integer,
sa.ForeignKey('sub_category._id')
),
sa.Column(
'product_id',
sa.Integer,
sa.ForeignKey('product._id')
)
)
class SubCategory(Base):
__tablename__ = 'sub_category'
id = sa.Column('_id', sa.Integer, primary_key=True)
products = sa.orm.relationship(
Product,
backref='sub_categories',
secondary=subcategory_product
)
return SubCategory
@pytest.fixture
def Product(self, Base):
class Product(Base):
__tablename__ = 'product'
id = sa.Column('_id', sa.Integer, primary_key=True)
price = sa.Column(sa.Numeric)
return Product
@pytest.fixture
def init_models(self, Catalog, Category, SubCategory, Product):
pass
| 28.864629
| 79
| 0.522542
| 596
| 6,610
| 5.558725
| 0.077181
| 0.054331
| 0.079686
| 0.043465
| 0.832176
| 0.822819
| 0.822819
| 0.774525
| 0.760942
| 0.727136
| 0
| 0
| 0.381846
| 6,610
| 228
| 80
| 28.991228
| 0.810817
| 0
| 0
| 0.73057
| 0
| 0
| 0.090469
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082902
| false
| 0.015544
| 0.010363
| 0.005181
| 0.238342
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
026bd83279fbac0f51bacbf47138a5022a5dd278
| 27,723
|
py
|
Python
|
src/ezcode/knapsack/__init__.py
|
zheng-gao/ez_code
|
fbf48990291aa57d6436d4548b0a6c25dfb8f82d
|
[
"MIT"
] | null | null | null |
src/ezcode/knapsack/__init__.py
|
zheng-gao/ez_code
|
fbf48990291aa57d6436d4548b0a6c25dfb8f82d
|
[
"MIT"
] | null | null | null |
src/ezcode/knapsack/__init__.py
|
zheng-gao/ez_code
|
fbf48990291aa57d6436d4548b0a6c25dfb8f82d
|
[
"MIT"
] | null | null | null |
from typing import Callable
class Knapsack:
@staticmethod
def best_value(
capacity: int,
sizes: list,
values: list,
quantities,
min_max: Callable = max,
zero_capacity_value=0,
fill_to_capacity=True,
output_item_list=True
):
if capacity < 0:
raise ValueError(f"Capacity cannot be negative: {capacity}")
for s in sizes:
if s <= 0:
raise ValueError(f"Item sizes must be positive: {sizes}")
if len(sizes) != len(values):
raise ValueError(f"The length of sizes {sizes} not match the length of values {values}")
if quantities:
if isinstance(quantities, list):
if len(quantities) != len(sizes):
raise ValueError(f"The length of quantities {quantities} not match the length of sizes {sizes}")
for q in quantities:
if q < 0:
raise ValueError(f"Item quantities cannot contain negative: {quantities}")
elif quantities < 0:
raise ValueError(f"Item quantities cannot be negative: {quantities}")
return Knapsack.best_value_with_limited_items_1d(
capacity=capacity,
sizes=sizes,
values=values,
quantities=quantities,
min_max=min_max,
zero_capacity_value=zero_capacity_value,
fill_to_capacity=fill_to_capacity,
output_dp_table=False,
output_item_list=output_item_list
)
else:
return Knapsack.best_value_with_unlimited_items_1d(
capacity=capacity,
sizes=sizes,
values=values,
min_max=min_max,
zero_capacity_value=zero_capacity_value,
fill_to_capacity=fill_to_capacity,
output_dp_table=False,
output_item_list=output_item_list
)
@staticmethod
def ways_to_fill(
capacity: int,
sizes: list,
quantities,
output_item_list=True
):
if capacity < 0:
raise ValueError(f"Capacity cannot be negative: {capacity}")
for s in sizes:
if s <= 0:
raise ValueError(f"Item sizes must be positive: {sizes}")
if quantities:
if isinstance(quantities, list):
if len(quantities) != len(sizes):
raise ValueError(f"The length of quantities {quantities} not match the length of sizes {sizes}")
for q in quantities:
if q < 0:
raise ValueError(f"Item quantities cannot contain negative: {quantities}")
elif quantities < 0:
raise ValueError(f"Item quantities cannot be negative: {quantities}")
return Knapsack.number_of_ways_to_fill_to_capacity_with_limited_items_1d(
capacity=capacity,
sizes=sizes,
quantities=quantities,
output_dp_table=False,
output_item_list=output_item_list
)
else:
return Knapsack.number_of_ways_to_fill_to_capacity_with_unlimited_items_1d(
capacity=capacity,
sizes=sizes,
output_dp_table=False,
output_item_list=output_item_list
)
@staticmethod
def best_value_with_limited_items_2d(
capacity: int,
sizes: list,
values: list,
min_max: Callable = max,
zero_capacity_value=0,
fill_to_capacity=True,
iterate_sizes_first=True,
output_dp_table=False,
output_item_list=True
):
"""
0-1 Knapsack
Bag Capacity = C
Items Sizes = [s0, s1, ... ]
Items Values = [v0, v1, ... ]
2D-Array "max_value" Init
Cap=[0, 1, 2, ... c_j-1, c_j, ..., C]
s_0 0, 0, 0, ... 0, v_0, ..., v_0 (where c_j-1 < w0 < c_j)
s_1 0,
... 0, Max Value
s_i 0, Other cells will be overwritten later
... 0,
s_N 0,
The meaning of max_value[i][c]:
Given the FIRST "i + 1" items, the max value of a bag of size "c" can make
value_without_item_i = max_value[i-1][c]
value_with_item_i = max_value[i - 1][c - w[i]] + v[i]
max_value[i - 1][c - w[i]] means if we put item i into the bag (+v[i]),
the max value that the rest of the capacity "c - w[i]" can make with a selection of the previous items
if the capacity of the bag is not large enough for the item i, max_value[i][c] = max_value[i - 1][c]
otherwise max_value[i][c] = max( value_without_item_i + value_with_item_i )
"""
infinity = float("-inf") if min_max == max else float("inf")
knapsack_init_value = infinity if fill_to_capacity else zero_capacity_value
knapsack_value = [[knapsack_init_value for _ in range(capacity + 1)] for _ in range(len(sizes))]
item_lists = None
if output_item_list:
item_lists = [[list() for _ in range(capacity + 1)] for _ in range(len(sizes))]
for i in range(len(sizes)): # init first column
knapsack_value[i][0] = zero_capacity_value
if fill_to_capacity:
knapsack_value[0][sizes[0]] = values[0] # init first row, c != w means not filled
if output_item_list:
item_lists[0][sizes[0]].append(0)
else:
for c in range(sizes[0], capacity + 1): # init first row, c < w means the bag is empty
knapsack_value[0][c] = values[0]
if output_item_list:
item_lists[0][c].append(0)
if iterate_sizes_first: # we can iterate either of the sizes or capacity first
for i in range(1, len(sizes)):
for c in range(1, capacity + 1):
if c < sizes[i]:
knapsack_value[i][c] = knapsack_value[i - 1][c]
else:
knapsack_value[i][c] = min_max(knapsack_value[i - 1][c], knapsack_value[i - 1][c - sizes[i]] + values[i])
if output_item_list:
if knapsack_value[i][c] == knapsack_init_value:
item_lists[i][c] = list()
elif knapsack_value[i][c] == knapsack_value[i - 1][c]:
item_lists[i][c] = item_lists[i - 1][c].copy()
else:
item_lists[i][c] = item_lists[i - 1][c - sizes[i]] + [i]
else:
for c in range(1, capacity + 1):
for i in range(1, len(sizes)):
if c < sizes[i]:
knapsack_value[i][c] = knapsack_value[i - 1][c]
else:
knapsack_value[i][c] = min_max(knapsack_value[i - 1][c], knapsack_value[i - 1][c - sizes[i]] + values[i])
if output_item_list:
if knapsack_value[i][c] == knapsack_init_value:
item_lists[i][c] = list()
elif knapsack_value[i][c] == knapsack_value[i - 1][c]:
item_lists[i][c] = item_lists[i - 1][c].copy()
else:
item_lists[i][c] = item_lists[i - 1][c - sizes[i]] + [i]
if output_dp_table:
return (knapsack_value, item_lists) if output_item_list else knapsack_value
else:
best_value = knapsack_value[len(sizes) - 1][capacity]
if output_item_list:
item_list = item_lists[len(sizes) - 1][capacity]
return (None, item_list) if best_value == knapsack_init_value else (best_value, item_list)
else:
return None if best_value == knapsack_init_value else best_value
@staticmethod
def best_value_with_limited_items_1d(
capacity: int,
sizes: list,
values: list,
quantities=1,
min_max: Callable = max,
zero_capacity_value=0,
fill_to_capacity=True,
output_dp_table=False,
output_item_list=True
):
"""
Rolling dp array: copy row i-1 to row i
We just need one row:
knapsack_value[c] means the max value that a bag with capacity c can make
Each loop will overwrite the knapsack_value[c]
Cannot swap loops
"""
if isinstance(quantities, int):
quantities_list = list()
for i in range(len(sizes)):
quantities_list.append(quantities)
quantities = quantities_list
else:
assert len(sizes) == len(quantities)
infinity = float("-inf") if min_max == max else float("inf")
knapsack_init_value = infinity if fill_to_capacity else zero_capacity_value
knapsack_value = [knapsack_init_value for _ in range(capacity + 1)]
knapsack_value[0] = zero_capacity_value
item_lists = None
if output_item_list:
item_lists = [list() for _ in range(capacity + 1)]
for i in range(len(sizes)): # must loop item sizes first, because we are rolling the rows not columns
for q in range(1, quantities[i] + 1): # same as flattening the items: sizes=[2,3] quantities=[1,2] ==> sizes=[2, 3, 3]
# c < sizes[i], knapsack_value[c] won't change
# Capacity loops backward; otherwise the same item would be put into the knapsack multiple times
for c in range(capacity, sizes[i] - 1, -1):
knapsack_value[c] = min_max(knapsack_value[c], knapsack_value[c - sizes[i]] + values[i])
if output_item_list:
if knapsack_value[c] == knapsack_init_value:
item_lists[c] = list()
elif knapsack_value[c] == knapsack_value[c - sizes[i]] + values[i]:
item_lists[c] = item_lists[c - sizes[i]] + [i]
# Another solution
# for c in range(capacity, sizes[i] - 1, -1):
# for q in range(1, min(quantities[i], c // sizes[i]) + 1):
# knapsack_value[c] = min_max(knapsack_value[c], knapsack_value[c - q * sizes[i]] + q * values[i])
# if output_item_list:
# if knapsack_value[c] == knapsack_init_value:
# item_lists[c] = list()
# elif knapsack_value[c] == knapsack_value[c - q * sizes[i]] + q * values[i]:
# item_lists[c] = item_lists[c - q * sizes[i]] + [i] * q
if output_dp_table:
return (knapsack_value, item_lists) if output_item_list else knapsack_value
else:
best_value = knapsack_value[capacity]
if output_item_list:
return (None, item_lists[capacity]) if best_value == knapsack_init_value else (best_value, item_lists[capacity])
else:
return None if best_value == knapsack_init_value else best_value
@staticmethod
def best_value_with_unlimited_items_1d(
capacity: int,
sizes: list,
values: list,
min_max: Callable = max,
zero_capacity_value=0,
fill_to_capacity=True,
iterate_sizes_first=True,
output_dp_table=False,
output_item_list=True
):
""" Similar to rolling row solution, but the two loops can swap the order """
infinity = float("-inf") if min_max == max else float("inf")
knapsack_init_value = infinity if fill_to_capacity else zero_capacity_value
knapsack_value = [knapsack_init_value for _ in range(capacity + 1)]
knapsack_value[0] = zero_capacity_value
item_lists = None
if output_item_list:
item_lists = [list() for _ in range(capacity + 1)]
if iterate_sizes_first:
for i in range(len(sizes)):
for c in range(sizes[i], capacity + 1): # Looping forward, so items can be added multiple times
knapsack_value[c] = min_max(knapsack_value[c], knapsack_value[c - sizes[i]] + values[i])
if output_item_list:
if knapsack_value[c] == knapsack_init_value:
item_lists[c] = list()
elif knapsack_value[c] == knapsack_value[c - sizes[i]] + values[i]:
item_lists[c] = item_lists[c - sizes[i]] + [i]
else:
for c in range(1, capacity + 1): # Looping forward, so items can be added multiple times
for i in range(len(sizes)):
if c >= sizes[i]: # c < sizes[i], knapsack_value[c] won't change
knapsack_value[c] = min_max(knapsack_value[c], knapsack_value[c - sizes[i]] + values[i])
if output_item_list:
if knapsack_value[c] == knapsack_init_value:
item_lists[c] = list()
elif knapsack_value[c] == knapsack_value[c - sizes[i]] + values[i]:
item_lists[c] = item_lists[c - sizes[i]] + [i]
if output_dp_table:
return (knapsack_value, item_lists) if output_item_list else knapsack_value
else:
best_value = knapsack_value[capacity]
if output_item_list:
return (None, item_lists[capacity]) if best_value == knapsack_init_value else (best_value, item_lists[capacity])
else:
return None if best_value == knapsack_init_value else best_value
@staticmethod
def best_value_with_unlimited_items_2d(
capacity: int,
sizes: list,
values: list,
min_max: Callable = max,
zero_capacity_value=0,
fill_to_capacity=True,
iterate_sizes_first=True,
output_dp_table=False,
output_item_list=True
):
infinity = float("-inf") if min_max == max else float("inf")
knapsack_init_value = infinity if fill_to_capacity else zero_capacity_value
knapsack_value = [[knapsack_init_value for _ in range(capacity + 1)] for _ in range(len(sizes))]
item_lists = None
if output_item_list:
item_lists = [[list() for _ in range(capacity + 1)] for _ in range(len(sizes))]
for i in range(len(sizes)): # init first column
knapsack_value[i][0] = zero_capacity_value
for c in range(sizes[0], capacity + 1): # init first row, c < w means the bag is empty, c != w means not filled
if c % sizes[0] == 0 or not fill_to_capacity:
knapsack_value[0][c] = values[0] * (c // sizes[0])
if output_item_list:
item_lists[0][c].extend([0] * (c // sizes[0]))
if iterate_sizes_first: # we can iterate either of the sizes or capacity first
for i in range(1, len(sizes)):
for c in range(1, capacity + 1):
# if c < sizes[i]:
# knapsack_value[i][c] = knapsack_value[i - 1][c]
# else:
# best_value = knapsack_init_value
# for k in range(1, (c // sizes[i]) + 1):
# best_value = min_max(best_value, knapsack_value[i - 1][c - k * sizes[i]] + k * values[i])
# knapsack_value[i][c] = min_max(knapsack_value[i - 1][c], best_value)
knapsack_value[i][c] = knapsack_value[i - 1][c]
if output_item_list:
item_lists[i][c] = item_lists[i - 1][c].copy()
if c >= sizes[i]:
knapsack_value[i][c] = min_max(knapsack_value[i][c], knapsack_value[i][c - sizes[i]] + values[i])
if output_item_list:
if knapsack_value[i][c] == knapsack_init_value:
item_lists[i][c] = list()
elif knapsack_value[i][c] == knapsack_value[i][c - sizes[i]] + values[i]:
item_lists[i][c] = item_lists[i][c - sizes[i]] + [i]
else:
for c in range(1, capacity + 1):
for i in range(1, len(sizes)):
# if c < sizes[i]:
# knapsack_value[i][c] = knapsack_value[i - 1][c]
# else:
# best_value = knapsack_init_value
# for k in range(1, (c // sizes[i]) + 1):
# best_value = min_max(best_value, knapsack_value[i - 1][c - k * sizes[i]] + k * values[i])
# knapsack_value[i][c] = min_max(knapsack_value[i - 1][c], best_value)
knapsack_value[i][c] = knapsack_value[i - 1][c]
if output_item_list:
item_lists[i][c] = item_lists[i - 1][c].copy()
if c >= sizes[i]:
knapsack_value[i][c] = min_max(knapsack_value[i][c], knapsack_value[i][c - sizes[i]] + values[i])
if output_item_list:
if knapsack_value[i][c] == knapsack_init_value:
item_lists[i][c] = list()
elif knapsack_value[i][c] == knapsack_value[i][c - sizes[i]] + values[i]:
item_lists[i][c] = item_lists[i][c - sizes[i]] + [i]
if output_dp_table:
return (knapsack_value, item_lists) if output_item_list else knapsack_value
else:
best_value = knapsack_value[len(sizes) - 1][capacity]
if output_item_list:
item_list = item_lists[len(sizes) - 1][capacity]
return (None, item_list) if best_value == knapsack_init_value else (best_value, item_list)
else:
return None if best_value == knapsack_init_value else best_value
@staticmethod
def number_of_ways_to_fill_to_capacity_with_unlimited_items_2d(
capacity: int,
sizes: list,
output_dp_table=False,
output_item_list=True
):
"""
number_of_ways[i][c] means given the FIRST i + 1 items, the number of ways to make capacity c
number_of_ways[i][c] = number_of_ways[i - 1][c] + number_of_ways[i][c - sizes[i]]
"""
number_of_ways = [[0 for _ in range(capacity + 1)] for _ in range(len(sizes))]
combo_lists = None
if output_item_list:
combo_lists = [[None for _ in range(capacity + 1)] for _ in range(len(sizes))]
for i in range(len(sizes)): # init first column
number_of_ways[i][0] = 1 # no item for 0 capacity is 1 way
if output_item_list:
combo_lists[i][0] = [[]] # empty list for no item combo
for c in range(sizes[0], capacity + 1): # init first row
if c % sizes[0] == 0:
number_of_ways[0][c] = 1
if output_item_list:
combo_lists[0][c] = [[0] * (c // sizes[0])] # one combo of all the item 0
for i in range(1, len(sizes)):
for c in range(1, capacity + 1):
number_of_ways[i][c] = number_of_ways[i - 1][c]
if c >= sizes[i]:
number_of_ways[i][c] += number_of_ways[i][c - sizes[i]] # same row i (item i can be reused), not i - 1
if output_item_list:
combo_lists[i][c] = combo_lists[i - 1][c]
if c >= sizes[i] and combo_lists[i][c - sizes[i]] is not None:
new_combo_list = list()
for combo in combo_lists[i][c - sizes[i]]:
new_combo_list.append(combo + [i])
combo_lists[i][c] = combo_lists[i][c] + new_combo_list if combo_lists[i][c] is not None else new_combo_list
if output_dp_table:
return (number_of_ways, combo_lists) if output_item_list else number_of_ways
else:
best_value = number_of_ways[len(sizes) - 1][capacity]
if output_item_list:
combo_list = combo_lists[len(sizes) - 1][capacity]
return (best_value, combo_list)
else:
return best_value
@staticmethod
def number_of_ways_to_fill_to_capacity_with_unlimited_items_1d(
capacity: int,
sizes: list,
output_dp_table=False,
output_item_list=True
):
"""
number_of_ways[c] means the number of ways to make capacity c
rolling row[i-1] over to row[i]
number_of_ways[c] = number_of_ways[c] + number_of_ways[c - sizes[i]]
"""
number_of_ways = [0 for _ in range(capacity + 1)]
combo_lists = None
if output_item_list:
combo_lists = [None for _ in range(capacity + 1)]
number_of_ways[0] = 1 # no item for 0 capacity is 1 way
if output_item_list:
combo_lists[0] = [[]] # empty list for no item combo
for i in range(len(sizes)):
for c in range(sizes[i], capacity + 1): # c starts from sizes[i] (c >= sizes[i])
number_of_ways[c] += number_of_ways[c - sizes[i]] # + (c > sizes[i] and c % sizes[i] == 0)
if output_item_list:
if combo_lists[c - sizes[i]] is not None:
new_combo_list = list()
for combo in combo_lists[c - sizes[i]]:
new_combo_list.append(combo + [i])
combo_lists[c] = combo_lists[c] + new_combo_list if combo_lists[c] is not None else new_combo_list
if output_dp_table:
return (number_of_ways, combo_lists) if output_item_list else number_of_ways
else:
best_value = number_of_ways[capacity]
if output_item_list:
combo_list = combo_lists[capacity]
return (best_value, combo_list)
else:
return best_value
@staticmethod
def number_of_ways_to_fill_to_capacity_with_limited_items_2d(
capacity: int,
sizes: list,
output_dp_table=False,
output_item_list=True
):
"""
number_of_ways[i][c] means: given the FIRST i + 1 items, the number of ways to fill capacity c
number_of_ways[i][c] = number_of_ways[i - 1][c] + number_of_ways[i - 1][c - sizes[i]]
number_of_ways[i - 1][c] counts the combos that do not use item i (only the previous items)
number_of_ways[i - 1][c - sizes[i]] counts the combos that do use item i: every combo that fills c - sizes[i] gains item i to form a new combo that fills c
"""
number_of_ways = [[0 for _ in range(capacity + 1)] for _ in range(len(sizes))]
combo_lists = None
if output_item_list:
combo_lists = [[None for _ in range(capacity + 1)] for _ in range(len(sizes))]
for i in range(len(sizes)): # init first column
number_of_ways[i][0] = 1 # no item for 0 capacity is 1 way
if output_item_list:
combo_lists[i][0] = [[]] # empty list for no item combo
if sizes[0] <= capacity: # init first row
number_of_ways[0][sizes[0]] = 1
if output_item_list:
combo_lists[0][sizes[0]] = [[0]]
for i in range(1, len(sizes)):
for c in range(1, capacity + 1):
# if c < sizes[i]:
# number_of_ways[i][c] = number_of_ways[i - 1][c]
# elif c == sizes[i]:
# number_of_ways[i][c] = number_of_ways[i - 1][c] + number_of_ways[i - 1][c - sizes[i]]
number_of_ways[i][c] = number_of_ways[i - 1][c]
if c >= sizes[i]:
number_of_ways[i][c] += number_of_ways[i - 1][c - sizes[i]]
if output_item_list:
if combo_lists[i - 1][c] is not None:
combo_lists[i][c] = combo_lists[i - 1][c]
if c >= sizes[i] and combo_lists[i - 1][c - sizes[i]] is not None:
new_combo_list = list()
for combo in combo_lists[i - 1][c - sizes[i]]:
new_combo_list.append(combo + [i])
combo_lists[i][c] = combo_lists[i][c] + new_combo_list if combo_lists[i][c] is not None else new_combo_list
if output_dp_table:
return (number_of_ways, combo_lists) if output_item_list else number_of_ways
else:
best_value = number_of_ways[len(sizes) - 1][capacity]
if output_item_list:
combo_list = combo_lists[len(sizes) - 1][capacity]
return (best_value, combo_list)
else:
return best_value
@staticmethod
def number_of_ways_to_fill_to_capacity_with_limited_items_1d(
capacity: int,
sizes: list,
quantities=1,
output_dp_table=False,
output_item_list=True
):
"""
number_of_ways[c] means the number of ways to make capacity c
number_of_ways[c] = number_of_ways[c] + number_of_ways[c - sizes[i]]
"""
if isinstance(quantities, int):
quantities_list = list()
for i in range(len(sizes)):
quantities_list.append(quantities)
quantities = quantities_list
else:
assert len(sizes) == len(quantities)
number_of_ways = [0 for _ in range(capacity + 1)]
combo_lists = None
if output_item_list:
combo_lists = [None for _ in range(capacity + 1)]
number_of_ways[0] = 1 # no item for 0 capacity is 1 way
if output_item_list:
combo_lists[0] = [[]] # empty list for no item combo
for i in range(len(sizes)):
for q in range(1, quantities[i] + 1):
for c in range(capacity, sizes[i] - 1, -1): # c >= sizes[i]
number_of_ways[c] += number_of_ways[c - sizes[i]]
if output_item_list:
if combo_lists[c - sizes[i]] is not None:
new_combo_list = list()
for combo in combo_lists[c - sizes[i]]:
new_combo_list.append(combo + [i])
combo_lists[c] = combo_lists[c] + new_combo_list if combo_lists[c] is not None else new_combo_list
if output_dp_table:
return (number_of_ways, combo_lists) if output_item_list else number_of_ways
else:
best_value = number_of_ways[capacity]
if output_item_list:
combo_list = combo_lists[capacity] # It might have duplicates
unique_combo_list = list()
if combo_list:
combo_set = set()
for combo in combo_list:
t = tuple(sorted(combo))  # list.sort() returns None; use sorted() to build a hashable key
if t not in combo_set:
unique_combo_list.append(combo)
combo_set.add(t)
return (len(unique_combo_list), unique_combo_list)
return (best_value, combo_list)
else:
return best_value
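# A minimal, hedged usage sketch (not part of the original module) showing the two public
# entry points above; the call shapes and return values follow the code as written:
if __name__ == "__main__":
    # 0-1 knapsack: each item usable at most once (quantities=1), spare capacity allowed
    best, items = Knapsack.best_value(
        capacity=10, sizes=[2, 3, 5, 7], values=[1, 5, 2, 4],
        quantities=1, fill_to_capacity=False,
    )
    print(best, items)  # best achievable value and the indices of the chosen items
    # Count (and list) the ways to fill capacity 5 exactly with unlimited copies of each size
    count, combos = Knapsack.ways_to_fill(capacity=5, sizes=[2, 3], quantities=None)
    print(count, combos)  # e.g. 1 way: one size-2 item plus one size-3 item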
| 48.046794
| 145
| 0.537171
| 3,618
| 27,723
| 3.872305
| 0.049475
| 0.086296
| 0.057388
| 0.053676
| 0.889507
| 0.879515
| 0.863812
| 0.85025
| 0.821984
| 0.80257
| 0
| 0.013545
| 0.366194
| 27,723
| 576
| 146
| 48.130208
| 0.783791
| 0.165386
| 0
| 0.888172
| 0
| 0
| 0.026493
| 0
| 0
| 0
| 0
| 0
| 0.004301
| 1
| 0.021505
| false
| 0
| 0.002151
| 0
| 0.088172
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65ffb62169d811cc14af150c5eafa69ec8772792
| 19,924
|
py
|
Python
|
data/battle_animation_scripts.py
|
kielbasiago/WorldsCollide
|
5aa7cffdecd14754c9eaa83cd0ad4d0282cc2cc2
|
[
"MIT"
] | 7
|
2022-01-15T02:53:53.000Z
|
2022-02-17T00:51:32.000Z
|
data/battle_animation_scripts.py
|
asilverthorn/WorldsCollide
|
5aa7cffdecd14754c9eaa83cd0ad4d0282cc2cc2
|
[
"MIT"
] | 8
|
2022-01-16T02:45:24.000Z
|
2022-03-21T02:08:27.000Z
|
data/battle_animation_scripts.py
|
asilverthorn/WorldsCollide
|
5aa7cffdecd14754c9eaa83cd0ad4d0282cc2cc2
|
[
"MIT"
] | 5
|
2022-01-15T02:53:38.000Z
|
2022-01-19T17:42:10.000Z
|
# List of addresses within the Battle Animation Scripts for the following commands which cause screen flashes:
# B0 - Set background palette color addition (absolute)
# B5 - Add color to background palette (relative)
# AF - Set background palette color subtraction (absolute)
# B6 - Subtract color from background palette (relative)
# Changing the byte at address + 1 to E0 (for the absolute commands) or F0 (for the relative commands) leaves the background color unchanged (that is, no flash)
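# A minimal, hedged sketch (hypothetical helper, not part of this file) of how the addresses
# below are intended to be used under the rule stated above: the byte at address + 1 is
# rewritten to E0 for the absolute commands (B0 / AF) or F0 for the relative commands (B5 / B6),
# turning each flash into a no-op.
def _disable_flashes(rom: bytearray, addresses):
    for address in addresses:
        command = rom[address]
        # B0 (set addition) and AF (set subtraction) are absolute -> E0
        # B5 (add) and B6 (subtract) are relative -> F0
        rom[address + 1] = 0xE0 if command in (0xB0, 0xAF) else 0xF0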
BATTLE_ANIMATION_FLASHES = {
"Goner": [
0x100088, # AF E0 - set background color subtraction to 0 (black)
0x10008C, # B6 61 - increase background color subtraction by 1 (red)
0x100092, # B6 31 - decrease background color subtraction by 1 (yellow)
0x100098, # B6 81 - increase background color subtraction by 1 (cyan)
0x1000A1, # B6 91 - decrease background color subtraction by 1 (cyan)
0x1000A3, # B6 21 - increase background color subtraction by 1 (yellow)
0x1000D3, # B6 8F - increase background color subtraction by 15 (cyan)
0x1000DF, # B0 FF - set background color addition to 31 (white)
0x100172, # B5 F2 - decrease background color addition by 2 (white)
],
"Final KEFKA Death": [
0x10023A, # B0 FF - set background color addition to 31 (white)
0x100240, # B5 F4 - decrease background color addition by 4 (white)
0x100248, # B0 FF - set background color addition to 31 (white)
0x10024E, # B5 F4 - decrease background color addition by 4 (white)
],
"Atom Edge": [ # Also True Edge
0x1003D0, # AF E0 - set background color subtraction to 0 (black)
0x1003DD, # B6 E1 - increase background color subtraction by 1 (black)
0x1003E6, # B6 E1 - increase background color subtraction by 1 (black)
0x10044B, # B6 F1 - decrease background color subtraction by 1 (black)
0x100457, # B6 F1 - decrease background color subtraction by 1 (black)
],
"Boss Death": [
0x100476, # B0 FF - set background color addition to 31 (white)
0x10047C, # B5 F4 - decrease background color addition by 4 (white)
0x100484, # B0 FF - set background color addition to 31 (white)
0x100497, # B5 F4 - decrease background color addition by 4 (white)
],
"Transform into Magicite": [
0x100F30, # B0 FF - set background color addition to 31 (white)
0x100F3F, # B5 F2 - decrease background color addition by 2 (white)
0x100F4E, # B5 F2 - decrease background color addition by 2 (white)
],
"Purifier": [
0x101340, # AF E0 - set background color subtraction to 0 (black)
0x101348, # B6 62 - increase background color subtraction by 2 (red)
0x101380, # B6 81 - increase background color subtraction by 1 (cyan)
0x10138A, # B6 F1 - decrease background color subtraction by 1 (black)
],
"Wall": [
0x10177B, # AF E0 - set background color subtraction to 0 (black)
0x10177F, # B6 61 - increase background color subtraction by 1 (red)
0x101788, # B6 51 - decrease background color subtraction by 1 (magenta)
0x101791, # B6 81 - increase background color subtraction by 1 (cyan)
0x10179A, # B6 31 - decrease background color subtraction by 1 (yellow)
0x1017A3, # B6 41 - increase background color subtraction by 1 (magenta)
0x1017AC, # B6 91 - decrease background color subtraction by 1 (cyan)
0x1017B5, # B6 51 - decrease background color subtraction by 1 (magenta)
],
"Pearl": [
0x10190E, # B0 E0 - set background color addition to 0 (white)
0x101913, # B5 E2 - increase background color addition by 2 (white)
0x10191E, # B5 F1 - decrease background color addition by 1 (white)
0x10193E, # B6 C2 - increase background color subtraction by 2 (blue)
],
"Ice 3": [
0x101978, # B0 FF - set background color addition to 31 (white)
0x10197B, # B5 F4 - decrease background color addition by 4 (white)
0x10197E, # B5 F4 - decrease background color addition by 4 (white)
0x101981, # B5 F4 - decrease background color addition by 4 (white)
0x101984, # B5 F4 - decrease background color addition by 4 (white)
0x101987, # B5 F4 - decrease background color addition by 4 (white)
0x10198A, # B5 F4 - decrease background color addition by 4 (white)
0x10198D, # B5 F4 - decrease background color addition by 4 (white)
0x101990, # B5 F4 - decrease background color addition by 4 (white)
],
"Fire 3": [
0x1019FA, # B0 9F - set background color addition to 31 (red)
0x101A1C, # B5 94 - decrease background color addition by 4 (red)
],
"Sleep": [
0x101A23, # AF E0 - set background color subtraction to 0 (black)
0x101A29, # B6 E1 - increase background color subtraction by 1 (black)
0x101A33, # B6 F1 - decrease background color subtraction by 1 (black)
],
"7-Flush": [
0x101B43, # AF E0 - set background color subtraction to 0 (black)
0x101B47, # B6 61 - increase background color subtraction by 1 (red)
0x101B4D, # B6 51 - decrease background color subtraction by 1 (magenta)
0x101B53, # B6 81 - increase background color subtraction by 1 (cyan)
0x101B59, # B6 31 - decrease background color subtraction by 1 (yellow)
0x101B5F, # B6 41 - increase background color subtraction by 1 (magenta)
0x101B65, # B6 91 - decrease background color subtraction by 1 (cyan)
0x101B6B, # B6 51 - decrease background color subtraction by 1 (magenta)
],
"H-Bomb": [
0x101BC5, # B0 E0 - set background color addition to 0 (white)
0x101BC9, # B5 E1 - increase background color addition by 1 (white)
0x101C13, # B5 F1 - decrease background color addition by 1 (white)
],
"Revenger": [
0x101C62, # AF E0 - set background color subtraction to 0 (black)
0x101C66, # B6 81 - increase background color subtraction by 1 (cyan)
0x101C6C, # B6 41 - increase background color subtraction by 1 (magenta)
0x101C72, # B6 91 - decrease background color subtraction by 1 (cyan)
0x101C78, # B6 21 - increase background color subtraction by 1 (yellow)
0x101C7E, # B6 51 - decrease background color subtraction by 1 (magenta)
0x101C84, # B6 81 - increase background color subtraction by 1 (cyan)
0x101C86, # B6 31 - decrease background color subtraction by 1 (yellow)
0x101C8C, # B6 91 - decrease background color subtraction by 1 (cyan)
],
"Phantasm": [
0x101DFD, # AF E0 - set background color subtraction to 0 (black)
0x101E03, # B6 E1 - increase background color subtraction by 1 (black)
0x101E07, # B0 FF - set background color addition to 31 (white)
0x101E0D, # B5 F4 - decrease background color addition by 4 (white)
0x101E15, # B6 E2 - increase background color subtraction by 2 (black)
0x101E1F, # B0 FF - set background color addition to 31 (white)
0x101E27, # B5 F4 - decrease background color addition by 4 (white)
0x101E2F, # B6 E2 - increase background color subtraction by 2 (black)
0x101E3B, # B6 F1 - decrease background color subtraction by 1 (black)
],
"TigerBreak": [
0x10240D, # B0 FF - set background color addition to 31 (white)
0x102411, # B5 F2 - decrease background color addition by 2 (white)
0x102416, # B5 F2 - decrease background color addition by 2 (white)
],
"Metamorph": [
0x102595, # AF E0 - set background color subtraction to 0 (black)
0x102599, # B6 61 - increase background color subtraction by 1 (red)
0x1025AF, # B6 71 - decrease background color subtraction by 1 (red)
],
"Cat Rain": [
0x102677, # B0 FF - set background color addition to 31 (white)
0x10267B, # B5 F1 - decrease background color addition by 1 (white)
],
"Charm": [
0x1026EE, # B0 FF - set background color addition to 31 (white)
0x1026FB, # B5 F1 - decrease background color addition by 1 (white)
],
"Mirager": [
0x102791, # B0 FF - set background color addition to 31 (white)
0x102795, # B5 F2 - decrease background color addition by 2 (white)
],
"SabreSoul": [
0x1027D3, # B0 FF - set background color addition to 31 (white)
0x1027DA, # B5 F2 - decrease background color addition by 2 (white)
],
"Back Blade": [
0x1028D3, # AF FF - set background color subtraction to 31 (black)
0x1028DF, # B6 F4 - decrease background color subtraction by 4 (black)
],
"RoyalShock": [
0x102967, # B0 FF - set background color addition to 31 (white)
0x10296B, # B5 F2 - decrease background color addition by 2 (white)
0x102973, # B5 F2 - decrease background color addition by 2 (white)
],
"Overcast": [
0x102C3A, # AF E0 - set background color subtraction to 0 (black)
0x102C55, # B6 E1 - increase background color subtraction by 1 (black)
0x102C8D, # B6 F1 - decrease background color subtraction by 1 (black)
0x102C91, # B6 F1 - decrease background color subtraction by 1 (black)
],
"Disaster": [
0x102CEE, # AF E0 - set background color subtraction to 0 (black)
0x102CF2, # B6 E1 - increase background color subtraction by 1 (black)
0x102D19, # B6 F1 - decrease background color subtraction by 1 (black)
],
"ForceField": [
0x102D3A, # B0 E0 - set background color addition to 0 (white)
0x102D48, # B5 E1 - increase background color addition by 1 (white)
0x102D64, # B5 F1 - decrease background color addition by 1 (white)
],
"Terra/Tritoch Lightning": [
0x102E05, # B0 E0 - set background color addition to 0 (white)
0x102E09, # B5 81 - increase background color addition by 1 (red)
0x102E24, # B5 61 - increase background color addition by 1 (cyan)
],
"S. Cross": [
0x102EDA, # AF E0 - set background color subtraction to 0 (black)
0x102EDE, # B6 E2 - increase background color subtraction by 2 (black)
0x102FA8, # B6 F2 - decrease background color subtraction by 2 (black)
0x102FB1, # B0 E0 - set background color addition to 0 (white)
0x102FBE, # B5 E2 - increase background color addition by 2 (white)
0x102FD9, # B5 F2 - decrease background color addition by 2 (white)
],
"Mind Blast": [
0x102FED, # B0 E0 - set background color addition to 0 (white)
0x102FF1, # B5 81 - increase background color addition by 1 (red)
0x102FF7, # B5 91 - decrease background color addition by 1 (red)
0x102FF9, # B5 21 - increase background color addition by 1 (blue)
0x102FFF, # B5 31 - decrease background color addition by 1 (blue)
0x103001, # B5 C1 - increase background color addition by 1 (yellow)
0x103007, # B5 91 - decrease background color addition by 1 (red)
0x10300D, # B5 51 - decrease background color addition by 1 (green)
0x103015, # B5 E2 - increase background color addition by 2 (white)
0x10301F, # B5 F1 - decrease background color addition by 1 (white)
],
"Flare Star": [
0x1030F5, # B0 E0 - set background color addition to 0 (white)
0x103106, # B5 81 - increase background color addition by 1 (red)
0x10310D, # B5 E2 - increase background color addition by 2 (white)
0x103123, # B5 71 - decrease background color addition by 1 (cyan)
0x10312E, # B5 91 - decrease background color addition by 1 (red)
],
"Quasar": [
0x1031D2, # AF E0 - set background color subtraction to 0 (black)
0x1031D6, # B6 E1 - increase background color subtraction by 1 (black)
0x1031FA, # B6 F1 - decrease background color subtraction by 1 (black)
],
"R.Polarity": [
0x10328B, # B0 FF - set background color addition to 31 (white)
0x103292, # B5 F1 - decrease background color addition by 1 (white)
],
"Rippler": [
0x1033C6, # B0 FF - set background color addition to 31 (white)
0x1033CA, # B5 F1 - decrease background color addition by 1 (white)
],
"Step Mine": [
0x1034D9, # B0 FF - set background color addition to 31 (white)
0x1034E0, # B5 F4 - decrease background color addition by 4 (white)
],
"L.5 Doom": [
0x1035E6, # B0 FF - set background color addition to 31 (white)
0x1035F6, # B5 F4 - decrease background color addition by 4 (white)
],
"Megazerk": [
0x103757, # B0 80 - set background color addition to 0 (red)
0x103761, # B5 82 - increase background color addition by 2 (red)
0x10378F, # B5 92 - decrease background color addition by 2 (red)
0x103795, # B5 92 - decrease background color addition by 2 (red)
0x10379B, # B5 92 - decrease background color addition by 2 (red)
0x1037A1, # B5 92 - decrease background color addition by 2 (red)
0x1037A7, # B5 92 - decrease background color addition by 2 (red)
0x1037AD, # B5 92 - decrease background color addition by 2 (red)
0x1037B3, # B5 92 - decrease background color addition by 2 (red)
0x1037B9, # B5 92 - decrease background color addition by 2 (red)
0x1037C0, # B5 92 - decrease background color addition by 2 (red)
],
"Schiller": [
0x103819, # B0 FF - set background color addition to 31 (white)
0x10381D, # B5 F4 - decrease background color addition by 4 (white)
],
"WallChange": [
0x10399E, # B0 FF - set background color addition to 31 (white)
0x1039A3, # B5 F2 - decrease background color addition by 2 (white)
0x1039A9, # B5 F2 - decrease background color addition by 2 (white)
0x1039AF, # B5 F2 - decrease background color addition by 2 (white)
0x1039B5, # B5 F2 - decrease background color addition by 2 (white)
0x1039BB, # B5 F2 - decrease background color addition by 2 (white)
0x1039C1, # B5 F2 - decrease background color addition by 2 (white)
0x1039C7, # B5 F2 - decrease background color addition by 2 (white)
0x1039CD, # B5 F2 - decrease background color addition by 2 (white)
0x1039D4, # B5 F2 - decrease background color addition by 2 (white)
],
"Ultima": [
0x1056CB, # AF 60 - set background color subtraction to 0 (red)
0x1056CF, # B6 C2 - increase background color subtraction by 2 (blue)
0x1056ED, # B0 FF - set background color addition to 31 (white)
0x1056F5, # B5 F1 - decrease background color addition by 1 (white)
],
"Bolt 3": [ # Also Giga Volt
0x10588E, # B0 FF - set background color addition to 31 (white)
0x105893, # B5 F4 - decrease background color addition by 4 (white)
0x105896, # B5 F4 - decrease background color addition by 4 (white)
0x105899, # B5 F4 - decrease background color addition by 4 (white)
0x10589C, # B5 F4 - decrease background color addition by 4 (white)
0x1058A1, # B5 F4 - decrease background color addition by 4 (white)
0x1058A6, # B5 F4 - decrease background color addition by 4 (white)
0x1058AB, # B5 F4 - decrease background color addition by 4 (white)
0x1058B0, # B5 F4 - decrease background color addition by 4 (white)
],
"X-Zone": [
0x105A5D, # B0 FF - set background color addition to 31 (white)
0x105A6A, # B5 F2 - decrease background color addition by 2 (white)
0x105A79, # B5 F2 - decrease background color addition by 2 (white)
],
"Dispel": [
0x105DC2, # B0 FF - set background color addition to 31 (white)
0x105DC9, # B5 F1 - decrease background color addition by 1 (white)
0x105DD2, # B5 F1 - decrease background color addition by 1 (white)
0x105DDB, # B5 F1 - decrease background color addition by 1 (white)
0x105DE4, # B5 F1 - decrease background color addition by 1 (white)
0x105DED, # B5 F1 - decrease background color addition by 1 (white)
],
"Muddle": [ # Also L.3 Muddle, Confusion
0x1060EA, # B0 FF - set background color addition to 31 (white)
0x1060EE, # B5 F1 - decrease background color addition by 1 (white)
],
"Shock": [
0x1068BE, # B0 FF - set background color addition to 31 (white)
0x1068D0, # B5 F1 - decrease background color addition by 1 (white)
],
"Bum Rush": [
0x106C3E, # B0 E0 - set background color addition to 0 (white)
0x106C47, # B0 E0 - set background color addition to 0 (white)
0x106C53, # B0 E0 - set background color addition to 0 (white)
0x106C7E, # B0 FF - set background color addition to 31 (white)
0x106C87, # B0 E0 - set background color addition to 0 (white)
0x106C95, # B0 FF - set background color addition to 31 (white)
0x106C9E, # B0 E0 - set background color addition to 0 (white)
],
"Stunner": [
0x1071BA, # B0 20 - set background color addition to 0 (blue)
0x1071C1, # B5 24 - increase background color addition by 4 (blue)
0x1071CA, # B5 24 - increase background color addition by 4 (blue)
0x1071D5, # B5 24 - increase background color addition by 4 (blue)
0x1071DE, # B5 24 - increase background color addition by 4 (blue)
0x1071E9, # B5 24 - increase background color addition by 4 (blue)
0x1071F2, # B5 24 - increase background color addition by 4 (blue)
0x1071FD, # B5 24 - increase background color addition by 4 (blue)
0x107206, # B5 24 - increase background color addition by 4 (blue)
0x107211, # B5 24 - increase background color addition by 4 (blue)
0x10721A, # B5 24 - increase background color addition by 4 (blue)
0x10725A, # B5 32 - decrease background color addition by 2 (blue)
],
"Quadra Slam": [ # Also Quadra Slice
0x1073DC, # B0 FF - set background color addition to 31 (white)
0x1073EE, # B5 F2 - decrease background color addition by 2 (white)
0x1073F3, # B5 F2 - decrease background color addition by 2 (white)
0x107402, # B0 5F - set background color addition to 31 (green)
0x107424, # B5 54 - decrease background color addition by 4 (green)
0x107429, # B5 54 - decrease background color addition by 4 (green)
0x107436, # B0 3F - set background color addition to 31 (blue)
0x107458, # B5 34 - decrease background color addition by 4 (blue)
0x10745D, # B5 34 - decrease background color addition by 4 (blue)
0x107490, # B0 9F - set background color addition to 31 (red)
0x1074B2, # B5 94 - decrease background color addition by 4 (red)
0x1074B7, # B5 94 - decrease background color addition by 4 (red)
],
"Slash": [
0x1074F4, # B0 FF - set background color addition to 31 (white)
0x1074FD, # B5 F2 - decrease background color addition by 2 (white)
0x107507, # B5 F2 - decrease background color addition by 2 (white)
],
"Flash": [
0x107850, # B0 FF - set background color addition to 31 (white)
0x10785C, # B5 F1 - decrease background color addition by 1 (white)
]
}
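# --- Illustrative sketch (not part of the original data) ---
# The dictionary above maps spell names to ROM file offsets of two-byte
# background color commands (B0/AF set, B5/B6 adjust), as described in the
# inline comments. Assuming `rom` is a bytearray of an unheadered ROM image
# and `flash_commands` is the dictionary above, a helper like the one below
# could be used to inspect those commands; the helper name and the offset
# interpretation are assumptions, not part of the original source.
def dump_flash_commands(rom, flash_commands):
    for spell, offsets in flash_commands.items():
        for offset in offsets:
            opcode, operand = rom[offset], rom[offset + 1]
            print(f"{spell}: 0x{offset:06X} -> {opcode:02X} {operand:02X}")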
| 58.428152
| 133
| 0.630546
| 2,489
| 19,924
| 5.046605
| 0.160305
| 0.279436
| 0.298464
| 0.224902
| 0.775575
| 0.766738
| 0.722793
| 0.722793
| 0.722793
| 0
| 0
| 0.152435
| 0.294067
| 19,924
| 340
| 134
| 58.6
| 0.740633
| 0.674312
| 0
| 0.144144
| 0
| 0
| 0.069106
| 0
| 0
| 0
| 0.315718
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a2fba5afd104e89bb7c06d80b25ac575e16cde2
| 2,528
|
py
|
Python
|
app/auth/forms/__init__.py
|
jg-725/IS219-FlaskAppProject
|
316aa298eda1bcda766ed085bb6f26ca7da7dfee
|
[
"BSD-3-Clause"
] | null | null | null |
app/auth/forms/__init__.py
|
jg-725/IS219-FlaskAppProject
|
316aa298eda1bcda766ed085bb6f26ca7da7dfee
|
[
"BSD-3-Clause"
] | null | null | null |
app/auth/forms/__init__.py
|
jg-725/IS219-FlaskAppProject
|
316aa298eda1bcda766ed085bb6f26ca7da7dfee
|
[
"BSD-3-Clause"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import validators
from wtforms.fields import *
class login_form(FlaskForm):
email = EmailField('Email Address', [
validators.DataRequired(),
])
password = PasswordField('Password', [
validators.DataRequired(),
validators.length(min=6, max=35)
])
submit = SubmitField()
class register_form(FlaskForm):
email = EmailField('Email Address', [
validators.DataRequired(),
], description="You need to signup with an email")
password = PasswordField('Create Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match'),
], description="Create a password ")
confirm = PasswordField('Repeat Password', description="Please retype your password to confirm it is correct")
submit = SubmitField()
class create_user_form(FlaskForm):
email = EmailField('Email Address', [
validators.DataRequired(),
], description="You need to signup with an email")
password = PasswordField('Create Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match'),
], description="Create a password ")
confirm = PasswordField('Repeat Password', description="Please retype your password to confirm it is correct")
is_admin = BooleanField('Admin', render_kw={'value':'1'})
submit = SubmitField()
class profile_form(FlaskForm):
about = TextAreaField('About', [validators.length(min=6, max=300)],
description="Please add information about yourself")
submit = SubmitField()
class user_edit_form(FlaskForm):
about = TextAreaField('About', [validators.length(min=6, max=300)],
description="Please add information about yourself")
is_admin = BooleanField('Admin', render_kw={'value':'1'})
submit = SubmitField()
class security_form(FlaskForm):
email = EmailField('Email Address', [
validators.DataRequired(),
], description="You can change your email address")
password = PasswordField('Create A New Password', [
validators.DataRequired(),
validators.EqualTo('confirm', message='Passwords must match'),
], description="Create a password ")
confirm = PasswordField('Re-Enter New Password', description="Please retype your password to confirm it is correct")
submit = SubmitField()
class csv_upload(FlaskForm):
file = FileField()
submit = SubmitField()
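# --- Illustrative usage sketch (not part of the original module) ---
# Minimal example of how login_form above might be wired into a Flask view.
# The app setup, route name, template name, and authentication step are
# assumptions; only validate_on_submit() and the form fields come from the
# code above and standard Flask-WTF behaviour.
from flask import Flask, render_template, redirect, url_for
app = Flask(__name__)
app.config['SECRET_KEY'] = 'change-me'  # Flask-WTF needs a secret key for CSRF
@app.route('/login', methods=['GET', 'POST'])
def login():
    form = login_form()
    if form.validate_on_submit():  # runs the validators declared on the form
        # authenticate form.email.data / form.password.data here (omitted)
        return redirect(url_for('login'))
    return render_template('login.html', form=form)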
| 32.410256
| 120
| 0.679589
| 260
| 2,528
| 6.553846
| 0.276923
| 0.103286
| 0.077465
| 0.065728
| 0.801056
| 0.787559
| 0.787559
| 0.787559
| 0.751174
| 0.751174
| 0
| 0.006458
| 0.203718
| 2,528
| 78
| 121
| 32.410256
| 0.84004
| 0
| 0
| 0.709091
| 0
| 0
| 0.259391
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.236364
| 0.054545
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
5a3481b2ed60e03ed802eb9ef17136804b5ee7a0
| 981
|
py
|
Python
|
pyhack/boris_stag.py
|
Krissmedt/runko
|
073306de9284f1502d0538d33545bc14c80e8b93
|
[
"MIT"
] | null | null | null |
pyhack/boris_stag.py
|
Krissmedt/runko
|
073306de9284f1502d0538d33545bc14c80e8b93
|
[
"MIT"
] | null | null | null |
pyhack/boris_stag.py
|
Krissmedt/runko
|
073306de9284f1502d0538d33545bc14c80e8b93
|
[
"MIT"
] | null | null | null |
import numpy as np
from pyhack.py_runko_aux import *
from pyhack.boris import *
def boris_staggered(tile,dtf=1):
    # Full-step staggered Boris push: update velocities with boris_rp, then
    # drift positions using the inverse Lorentz factor from ginv.
c = tile.cfl
cont = tile.get_container(0)
pos = py_pos(cont)
vel = py_vel(cont)
E,B = py_em(cont)
nq = pos.shape[0]
dims = pos.shape[1]
vel = boris_rp(vel,E,B,c,cont.q,dtf=dtf)
g = ginv(c,vel*c)
for i in range(0,dims):
pos[:,i] += dtf*c*vel[:,i]*g
tile.delete_all_particles()
for i in range(0,nq):
cont.add_particle(pos[i,:],vel[i,:],1.0)
def boris_staggered_first(tile,dtf=1):
    # First staggered step: apply only a half-step velocity kick (0.5*dtf)
    # so velocities and positions end up offset by half a time step.
c = tile.cfl
cont = tile.get_container(0)
pos = py_pos(cont)
vel = py_vel(cont)
E,B = py_em(cont)
nq = pos.shape[0]
dims = pos.shape[1]
vel = boris_rp(vel,E,B,c,cont.q,dtf=0.5*dtf)
g = ginv(c,vel*c)
for i in range(0,dims):
pos[:,i] += dtf*c*vel[:,i]*g
tile.delete_all_particles()
for i in range(0,nq):
cont.add_particle(pos[i,:],vel[i,:],1.0)
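# --- Illustrative driver sketch (not part of the original module) ---
# One plausible way to drive the staggered push above over many steps:
# bootstrap the velocity/position stagger with boris_staggered_first, then
# apply boris_staggered for the remaining steps. The tile object and the
# step count are assumptions based only on the functions defined above.
def push_particles(tile, nsteps, dtf=1):
    boris_staggered_first(tile, dtf=dtf)  # half-step velocity kick + full drift
    for _ in range(1, nsteps):
        boris_staggered(tile, dtf=dtf)    # full kick + drift thereafter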
| 18.509434
| 48
| 0.579001
| 183
| 981
| 2.989071
| 0.240437
| 0.014625
| 0.058501
| 0.080439
| 0.8117
| 0.8117
| 0.8117
| 0.8117
| 0.8117
| 0.8117
| 0
| 0.02439
| 0.247706
| 981
| 52
| 49
| 18.865385
| 0.716802
| 0
| 0
| 0.787879
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0
| 0.090909
| 0
| 0.151515
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a395024f625042332e48560226cfb73aaa1b4a7
| 14,129
|
py
|
Python
|
angr/procedures/definitions/win32_d3dcompiler_47.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_d3dcompiler_47.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_d3dcompiler_47.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
# pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
_l = logging.getLogger(name=__name__)
lib = SimLibrary()
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("d3dcompiler_47.dll")
prototypes = \
{
#
'D3DDisassemble11Trace': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeBottom(label="ID3D11ShaderTrace"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "pTrace", "StartStep", "NumSteps", "Flags", "ppDisassembly"]),
#
'D3DReadFileToBlob': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pFileName", "ppContents"]),
#
'D3DWriteBlobToFile': SimTypeFunction([SimTypeBottom(label="ID3DBlob"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pBlob", "pFileName", "bOverwrite"]),
#
'D3DCompile': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "Definition": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="D3D_SHADER_MACRO", pack=False, align=None), offset=0), SimTypeBottom(label="ID3DInclude"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "pSourceName", "pDefines", "pInclude", "pEntrypoint", "pTarget", "Flags1", "Flags2", "ppCode", "ppErrorMsgs"]),
#
'D3DCompile2': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "Definition": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="D3D_SHADER_MACRO", pack=False, align=None), offset=0), SimTypeBottom(label="ID3DInclude"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "pSourceName", "pDefines", "pInclude", "pEntrypoint", "pTarget", "Flags1", "Flags2", "SecondaryDataFlags", "pSecondaryData", "SecondaryDataSize", "ppCode", "ppErrorMsgs"]),
#
'D3DCompileFromFile': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "Definition": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="D3D_SHADER_MACRO", pack=False, align=None), offset=0), SimTypeBottom(label="ID3DInclude"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pFileName", "pDefines", "pInclude", "pEntrypoint", "pTarget", "Flags1", "Flags2", "ppCode", "ppErrorMsgs"]),
#
'D3DPreprocess': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "Definition": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="D3D_SHADER_MACRO", pack=False, align=None), offset=0), SimTypeBottom(label="ID3DInclude"), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "pSourceName", "pDefines", "pInclude", "ppCodeText", "ppErrorMsgs"]),
#
'D3DGetDebugInfo': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "ppDebugInfo"]),
#
'D3DReflect': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "pInterface", "ppReflector"]),
#
'D3DReflectLibrary': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "riid", "ppReflector"]),
#
'D3DDisassemble': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "Flags", "szComments", "ppDisassembly"]),
#
'D3DDisassembleRegion': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "Flags", "szComments", "StartByteOffset", "NumInsts", "pFinishByteOffset", "ppDisassembly"]),
#
'D3DCreateLinker': SimTypeFunction([SimTypePointer(SimTypeBottom(label="ID3D11Linker"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppLinker"]),
#
'D3DLoadModule': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeBottom(label="ID3D11Module"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "cbSrcDataSize", "ppModule"]),
#
'D3DCreateFunctionLinkingGraph': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="ID3D11FunctionLinkingGraph"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["uFlags", "ppFunctionLinkingGraph"]),
#
'D3DGetTraceInstructionOffsets': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), label="LPArray", offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "Flags", "StartInstIndex", "NumInsts", "pOffsets", "pTotalInsts"]),
#
'D3DGetInputSignatureBlob': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "ppSignatureBlob"]),
#
'D3DGetOutputSignatureBlob': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "ppSignatureBlob"]),
#
'D3DGetInputAndOutputSignatureBlob': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "ppSignatureBlob"]),
#
'D3DStripShader': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pShaderBytecode", "BytecodeLength", "uStripFlags", "ppStrippedBlob"]),
#
'D3DGetBlobPart': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="D3D_BLOB_PART"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "Part", "Flags", "ppPart"]),
#
'D3DSetBlobPart': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="D3D_BLOB_PART"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "Part", "Flags", "pPart", "PartSize", "ppNewShader"]),
#
'D3DCreateBlob': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Size", "ppBlob"]),
#
'D3DCompressShaders': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"pBytecode": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "BytecodeLength": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)}, name="D3D_SHADER_DATA", pack=False, align=None), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["uNumShaders", "pShaderData", "uFlags", "ppCompressedData"]),
#
'D3DDecompressShaders': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="ID3DBlob"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSrcData", "SrcDataSize", "uNumShaders", "uStartIndex", "pIndices", "uFlags", "ppShaders", "pTotalShaders"]),
#
'D3DDisassemble10Effect': SimTypeFunction([SimTypeBottom(label="ID3D10Effect"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="ID3DBlob"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pEffect", "Flags", "ppDisassembly"]),
}
lib.set_prototypes(prototypes)
| 190.932432
| 1,222
| 0.737561
| 1,429
| 14,129
| 7.253324
| 0.116165
| 0.07699
| 0.113459
| 0.140473
| 0.820646
| 0.817077
| 0.80849
| 0.789387
| 0.76604
| 0.757356
| 0
| 0.024334
| 0.080897
| 14,129
| 73
| 1,223
| 193.547945
| 0.773833
| 0.001982
| 0
| 0
| 0
| 0
| 0.217484
| 0.016445
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5a7f42aae312bdb1dfd1e806bfb1013a4638beeb
| 48
|
py
|
Python
|
surge_multiplier_mdp/__init__.py
|
mbattifarano/surge-multiplier-mdp
|
8a8477662a2a9b7daa7acb8b8cf486bef0ec8c05
|
[
"MIT"
] | null | null | null |
surge_multiplier_mdp/__init__.py
|
mbattifarano/surge-multiplier-mdp
|
8a8477662a2a9b7daa7acb8b8cf486bef0ec8c05
|
[
"MIT"
] | null | null | null |
surge_multiplier_mdp/__init__.py
|
mbattifarano/surge-multiplier-mdp
|
8a8477662a2a9b7daa7acb8b8cf486bef0ec8c05
|
[
"MIT"
] | null | null | null |
from .mdp_value_iteration import value_iteration
| 48
| 48
| 0.916667
| 7
| 48
| 5.857143
| 0.714286
| 0.682927
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 48
| 1
| 48
| 48
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ce5dcda8e728127b9f9d9754ec7ec959e800ef14
| 31,373
|
py
|
Python
|
modules/users_and_roles_tab.py
|
scrummastermind/sumologictoolbox
|
02d9acb970943521685091d36b8d5135e817c22c
|
[
"Apache-2.0"
] | null | null | null |
modules/users_and_roles_tab.py
|
scrummastermind/sumologictoolbox
|
02d9acb970943521685091d36b8d5135e817c22c
|
[
"Apache-2.0"
] | null | null | null |
modules/users_and_roles_tab.py
|
scrummastermind/sumologictoolbox
|
02d9acb970943521685091d36b8d5135e817c22c
|
[
"Apache-2.0"
] | null | null | null |
class_name = 'users_and_roles_tab'
from qtpy import QtCore, QtGui, QtWidgets, uic
import os
import sys
import re
import pathlib
import json
from logzero import logger
from modules.sumologic import SumoLogic
from modules.shared import ShowTextDialog
class users_and_roles_tab(QtWidgets.QWidget):
def __init__(self, mainwindow):
super(users_and_roles_tab, self).__init__()
self.mainwindow = mainwindow
self.tab_name = 'Users and Roles'
self.cred_usage = 'both'
users_and_roles_widget_ui = os.path.join(self.mainwindow.basedir, 'data/users_and_roles.ui')
uic.loadUi(users_and_roles_widget_ui, self)
# Connect the UI buttons to methods
# Connect Update Buttons
self.pushButtonUpdateUsersAndRolesLeft.clicked.connect(lambda: self.update_users_and_roles_lists(
self.listWidgetUsersLeft,
self.listWidgetRolesLeft,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionLeft.currentText())],
str(self.mainwindow.lineEditUserNameLeft.text()),
str(self.mainwindow.lineEditPasswordLeft.text())
))
self.pushButtonUpdateUsersAndRolesRight.clicked.connect(lambda: self.update_users_and_roles_lists(
self.listWidgetUsersRight,
self.listWidgetRolesRight,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionRight.currentText())],
str(self.mainwindow.lineEditUserNameRight.text()),
str(self.mainwindow.lineEditPasswordRight.text())
))
# Connect Search Bars
self.lineEditSearchUsersLeft.textChanged.connect(lambda: self.set_listwidget_filter(
self.listWidgetUsersLeft,
self.lineEditSearchUsersLeft.text()
))
self.lineEditSearchUsersRight.textChanged.connect(lambda: self.set_listwidget_filter(
self.listWidgetUsersRight,
self.lineEditSearchUsersRight.text()
))
self.lineEditSearchRolesLeft.textChanged.connect(lambda: self.set_listwidget_filter(
self.listWidgetRolesLeft,
self.lineEditSearchRolesLeft.text()
))
self.lineEditSearchRolesRight.textChanged.connect(lambda: self.set_listwidget_filter(
self.listWidgetRolesRight,
self.lineEditSearchRolesRight.text()
))
self.pushButtonCopyUserLeftToRight.clicked.connect(lambda: self.copy_user(
self.listWidgetUsersLeft,
self.listWidgetUsersRight,
self.listWidgetRolesRight,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionLeft.currentText())],
str(self.mainwindow.lineEditUserNameLeft.text()),
str(self.mainwindow.lineEditPasswordLeft.text()),
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionRight.currentText())],
str(self.mainwindow.lineEditUserNameRight.text()),
str(self.mainwindow.lineEditPasswordRight.text())
))
self.pushButtonCopyUserRightToLeft.clicked.connect(lambda: self.copy_user(
self.listWidgetUsersRight,
self.listWidgetUsersLeft,
self.listWidgetRolesLeft,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionRight.currentText())],
str(self.mainwindow.lineEditUserNameRight.text()),
str(self.mainwindow.lineEditPasswordRight.text()),
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionLeft.currentText())],
str(self.mainwindow.lineEditUserNameLeft.text()),
str(self.mainwindow.lineEditPasswordLeft.text())
))
self.pushButtonBackupUserLeft.clicked.connect(lambda: self.backup_user(
self.listWidgetUsersLeft,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionLeft.currentText())],
str(self.mainwindow.lineEditUserNameLeft.text()),
str(self.mainwindow.lineEditPasswordLeft.text())
))
self.pushButtonBackupUserRight.clicked.connect(lambda: self.backup_user(
self.listWidgetUsersRight,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionRight.currentText())],
str(self.mainwindow.lineEditUserNameRight.text()),
str(self.mainwindow.lineEditPasswordRight.text())
))
self.pushButtonUserJSONLeft.clicked.connect(lambda: self.view_user_json(
self.listWidgetUsersLeft,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionLeft.currentText())],
str(self.mainwindow.lineEditUserNameLeft.text()),
str(self.mainwindow.lineEditPasswordLeft.text())
))
self.pushButtonUserJSONRight.clicked.connect(lambda: self.view_user_json(
self.listWidgetUsersRight,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionRight.currentText())],
str(self.mainwindow.lineEditUserNameRight.text()),
str(self.mainwindow.lineEditPasswordRight.text())
))
self.pushButtonRestoreUserLeft.clicked.connect(lambda: self.restore_user(
self.listWidgetRolesLeft,
self.listWidgetUsersLeft,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionLeft.currentText())],
str(self.mainwindow.lineEditUserNameLeft.text()),
str(self.mainwindow.lineEditPasswordLeft.text())
))
self.pushButtonRestoreUserRight.clicked.connect(lambda: self.restore_user(
self.listWidgetRolesRight,
self.listWidgetUsersRight,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionRight.currentText())],
str(self.mainwindow.lineEditUserNameRight.text()),
str(self.mainwindow.lineEditPasswordRight.text())
))
self.pushButtonDeleteUserLeft.clicked.connect(lambda: self.delete_user(
self.listWidgetRolesLeft,
self.listWidgetUsersLeft,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionLeft.currentText())],
str(self.mainwindow.lineEditUserNameLeft.text()),
str(self.mainwindow.lineEditPasswordLeft.text())
))
self.pushButtonDeleteUserRight.clicked.connect(lambda: self.delete_user(
self.listWidgetRolesRight,
self.listWidgetUsersRight,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionRight.currentText())],
str(self.mainwindow.lineEditUserNameRight.text()),
str(self.mainwindow.lineEditPasswordRight.text())
))
self.pushButtonCopyRoleLeftToRight.clicked.connect(lambda: self.copy_role(
self.listWidgetRolesLeft,
self.listWidgetRolesRight,
self.listWidgetUsersRight,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionLeft.currentText())],
str(self.mainwindow.lineEditUserNameLeft.text()),
str(self.mainwindow.lineEditPasswordLeft.text()),
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionRight.currentText())],
str(self.mainwindow.lineEditUserNameRight.text()),
str(self.mainwindow.lineEditPasswordRight.text())
))
self.pushButtonCopyRoleRightToLeft.clicked.connect(lambda: self.copy_role(
self.listWidgetRolesRight,
self.listWidgetRolesLeft,
self.listWidgetUsersLeft,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionRight.currentText())],
str(self.mainwindow.lineEditUserNameRight.text()),
str(self.mainwindow.lineEditPasswordRight.text()),
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionLeft.currentText())],
str(self.mainwindow.lineEditUserNameLeft.text()),
str(self.mainwindow.lineEditPasswordLeft.text())
))
self.pushButtonBackupRoleLeft.clicked.connect(lambda: self.backup_role(
self.listWidgetRolesLeft,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionLeft.currentText())],
str(self.mainwindow.lineEditUserNameLeft.text()),
str(self.mainwindow.lineEditPasswordLeft.text())
))
self.pushButtonBackupRoleRight.clicked.connect(lambda: self.backup_role(
self.listWidgetRolesRight,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionRight.currentText())],
str(self.mainwindow.lineEditUserNameRight.text()),
str(self.mainwindow.lineEditPasswordRight.text())
))
self.pushButtonRoleJSONLeft.clicked.connect(lambda: self.view_role_json(
self.listWidgetRolesLeft,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionLeft.currentText())],
str(self.mainwindow.lineEditUserNameLeft.text()),
str(self.mainwindow.lineEditPasswordLeft.text())
))
self.pushButtonRoleJSONRight.clicked.connect(lambda: self.view_role_json(
self.listWidgetRolesRight,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionRight.currentText())],
str(self.mainwindow.lineEditUserNameRight.text()),
str(self.mainwindow.lineEditPasswordRight.text())
))
self.pushButtonRestoreRoleLeft.clicked.connect(lambda: self.restore_role(
self.listWidgetRolesLeft,
self.listWidgetUsersLeft,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionLeft.currentText())],
str(self.mainwindow.lineEditUserNameLeft.text()),
str(self.mainwindow.lineEditPasswordLeft.text())
))
self.pushButtonRestoreRoleRight.clicked.connect(lambda: self.restore_role(
self.listWidgetRolesRight,
self.listWidgetUsersRight,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionRight.currentText())],
str(self.mainwindow.lineEditUserNameRight.text()),
str(self.mainwindow.lineEditPasswordRight.text())
))
self.pushButtonDeleteRoleLeft.clicked.connect(lambda: self.delete_role(
self.listWidgetRolesLeft,
self.listWidgetUsersLeft,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionLeft.currentText())],
str(self.mainwindow.lineEditUserNameLeft.text()),
str(self.mainwindow.lineEditPasswordLeft.text())
))
self.pushButtonDeleteRoleRight.clicked.connect(lambda: self.delete_role(
self.listWidgetRolesRight,
self.listWidgetUsersRight,
self.mainwindow.loadedapiurls[str(self.mainwindow.comboBoxRegionRight.currentText())],
str(self.mainwindow.lineEditUserNameRight.text()),
str(self.mainwindow.lineEditPasswordRight.text())
))
def reset_stateful_objects(self, side='both'):
if side == 'both':
left = True
right = True
if side == 'left':
left = True
right = False
if side == 'right':
left = False
right = True
if left:
self.listWidgetUsersLeft.clear()
self.listWidgetUsersLeft.currentcontent = {}
self.listWidgetUsersLeft.updated = False
self.listWidgetRolesLeft.clear()
self.listWidgetRolesLeft.currentcontent = {}
self.listWidgetRolesLeft.updated = False
if right:
self.listWidgetUsersRight.clear()
self.listWidgetUsersRight.currentcontent = {}
self.listWidgetUsersRight.updated = False
self.listWidgetRolesRight.clear()
self.listWidgetRolesRight.currentcontent = {}
self.listWidgetRolesRight.updated = False
def set_listwidget_filter(self, ListWidget, filtertext):
for row in range(ListWidget.count()):
item = ListWidget.item(row)
widget = ListWidget.itemWidget(item)
if filtertext:
item.setHidden(filtertext not in item.text())
else:
item.setHidden(False)
def update_users_and_roles_lists(self, UserListWidget, RoleListWidget, url, id, key):
sumo = SumoLogic(id, key, endpoint=url, log_level=self.mainwindow.log_level)
try:
logger.info("[Users and Roles] Updating Users and Roles Lists")
UserListWidget.currentcontent = sumo.get_users_sync()
RoleListWidget.currentcontent = sumo.get_roles_sync()
self.update_users_and_roles_listwidgets(UserListWidget, RoleListWidget)
return
except Exception as e:
UserListWidget.updated = False
RoleListWidget.updated = False
logger.exception(e)
self.mainwindow.errorbox('Something went wrong:\n\n' + str(e))
return
def update_users_and_roles_listwidgets(self, UserListWidget, RoleListWidget):
try:
UserListWidget.clear()
RoleListWidget.clear()
UserListWidget.setSortingEnabled(True)
for object in UserListWidget.currentcontent:
item_name = object['firstName'] + ' ' + object['lastName']
item = QtWidgets.QListWidgetItem(item_name)
item.details = object
UserListWidget.addItem(item) # populate the list widget in the GUI
UserListWidget.updated = True
for object in RoleListWidget.currentcontent:
item_name = object['name']
item = QtWidgets.QListWidgetItem(item_name)
item.details = object
RoleListWidget.addItem(item) # populate the list widget in the GUI
RoleListWidget.updated = True
except Exception as e:
logger.exception(e)
self.mainwindow.errorbox('Something went wrong:\n\n' + str(e))
return
def copy_user(self, UserListWidgetFrom, UserListWidgetTo, RoleListWidgetTo, fromurl, fromid, fromkey,
tourl, toid, tokey):
# Need to add check if user already exists and interactively ask if any missing roles should be created
logger.info("[Users and Roles]Copying User(s)")
try:
selecteditems = UserListWidgetFrom.selectedItems()
if len(selecteditems) > 0: # make sure something was selected
fromsumo = SumoLogic(fromid, fromkey, endpoint=fromurl, log_level=self.mainwindow.log_level)
tosumo = SumoLogic(toid, tokey, endpoint=tourl, log_level=self.mainwindow.log_level)
for selecteditem in selecteditems:
user_id = selecteditem.details['id']
user = fromsumo.get_user_and_roles(user_id)
dest_roles = tosumo.get_roles_sync()
for source_role in user['roles']:
role_already_exists_in_dest = False
source_role_id = source_role['id']
for dest_role in dest_roles:
if dest_role['name'] == source_role['name']:
role_already_exists_in_dest = True
dest_role_id = dest_role['id']
if role_already_exists_in_dest:
user['roleIds'].append(dest_role_id)
user['roleIds'].remove(source_role_id)
else:
source_role['users'] = []
tosumo.create_role(source_role)
updated_dest_roles = tosumo.get_roles_sync()
for updated_dest_role in updated_dest_roles:
if updated_dest_role['name'] == source_role['name']:
user['roleIds'].append(updated_dest_role['id'])
user['roleIds'].remove(source_role_id)
tosumo.create_user(user['firstName'], user['lastName'], user['email'], user['roleIds'])
self.update_users_and_roles_lists(UserListWidgetTo, RoleListWidgetTo, tourl, toid, tokey)
return
else:
self.mainwindow.errorbox('You have not made any selections.')
return
except Exception as e:
logger.exception(e)
self.mainwindow.errorbox('Something went wrong:' + str(e))
self.update_users_and_roles_lists(UserListWidgetTo, RoleListWidgetTo, tourl, toid, tokey)
return
def backup_user(self, UserListWidget, url, id, key):
logger.info("[Users and Roles]Backing Up User(s)")
selecteditems = UserListWidget.selectedItems()
if len(selecteditems) > 0: # make sure something was selected
savepath = str(QtWidgets.QFileDialog.getExistingDirectory(self, "Select Backup Directory"))
if os.access(savepath, os.W_OK):
message = ''
sumo = SumoLogic(id, key, endpoint=url, log_level=self.mainwindow.log_level)
for selecteditem in selecteditems:
user_id = selecteditem.details['id']
try:
export = sumo.get_user_and_roles(user_id)
savefilepath = pathlib.Path(savepath + r'/' + str(selecteditem.text()) + r'.user.json')
if savefilepath:
with savefilepath.open(mode='w') as filepointer:
json.dump(export, filepointer)
message = message + str(selecteditem.text()) + r'.json' + '\n'
except Exception as e:
logger.exception(e)
self.mainwindow.errorbox('Something went wrong:\n\n' + str(e))
return
self.mainwindow.infobox('Wrote files: \n\n' + message)
else:
self.mainwindow.errorbox("You don't have permissions to write to that directory")
else:
self.mainwindow.errorbox('No user selected.')
return
def view_user_json(self, UserListWidget, url, id, key):
logger.info("[Users and Roles]Viewing User(s) JSON")
selecteditems = UserListWidget.selectedItems()
if len(selecteditems) > 0: # make sure something was selected
try:
sumo = SumoLogic(id, key, endpoint=url, log_level=self.mainwindow.log_level)
json_text = ''
for selecteditem in selecteditems:
user_id = selecteditem.details['id']
user = sumo.get_user(user_id)
json_text = json_text + json.dumps(user, indent=4, sort_keys=True) + '\n\n'
self.json_window = ShowTextDialog('JSON', json_text, self.mainwindow.basedir)
self.json_window.show()
except Exception as e:
logger.exception(e)
self.mainwindow.errorbox('Something went wrong:\n\n' + str(e))
return
else:
self.mainwindow.errorbox('No user selected.')
return
def restore_user(self, RoleListWidget, UserListWidget, url, id, key):
logger.info("[Users and Roles]Restoring User(s)")
if UserListWidget.updated == True:
filter = "JSON (*.json)"
filelist, status = QtWidgets.QFileDialog.getOpenFileNames(self, "Open file(s)...", os.getcwd(),
filter)
if len(filelist) > 0:
sumo = SumoLogic(id, key, endpoint=url, log_level=self.mainwindow.log_level)
for file in filelist:
try:
with open(file) as filepointer:
user = json.load(filepointer)
except Exception as e:
logger.exception(e)
self.mainwindow.errorbox(
"Something went wrong reading the file. Do you have the right file permissions? Does it contain valid JSON?")
return
try:
dest_roles = sumo.get_roles_sync()
for source_role in user['roles']:
role_already_exists_in_dest = False
source_role_id = source_role['id']
for dest_role in dest_roles:
if dest_role['name'] == source_role['name']:
role_already_exists_in_dest = True
dest_role_id = dest_role['id']
if role_already_exists_in_dest:
# print('found role at target: ' + source_role['name'])
user['roleIds'].append(dest_role_id)
user['roleIds'].remove(source_role_id)
else:
source_role['users'] = []
sumo.create_role(source_role)
updated_dest_roles = sumo.get_roles_sync()
for updated_dest_role in updated_dest_roles:
if updated_dest_role['name'] == source_role['name']:
user['roleIds'].append(updated_dest_role['id'])
user['roleIds'].remove(source_role_id)
# print('Did not find role at target. Added role:' + source_role['name'])
# print('modified user: ' + str(user))
sumo.create_user(user['firstName'], user['lastName'], user['email'], user['roleIds'])
except Exception as e:
logger.exception(e)
self.mainwindow.errorbox('Something went wrong:\n\n' + str(e))
return
self.update_users_and_roles_lists(UserListWidget, RoleListWidget, url, id, key)
else:
self.mainwindow.errorbox("Please select at least one file to restore.")
return
else:
self.mainwindow.errorbox("Please update the directory list before restoring content")
return
def delete_user(self, RoleListWidget, UserListWidget, url, id, key):
logger.info("[Users and Roles]Deleting User(s)")
selecteditems = UserListWidget.selectedItems()
if len(selecteditems) > 0: # make sure something was selected
message = "You are about to delete the following item(s):\n\n"
for selecteditem in selecteditems:
message = message + str(selecteditem.text()) + "\n"
message = message + '''
This is exceedingly DANGEROUS!!!!
Please be VERY, VERY, VERY sure you want to do this!
You could lose quite a bit of work if you delete the wrong thing(s).
If you are absolutely sure, type "DELETE" in the box below.
'''
text, result = QtWidgets.QInputDialog.getText(self, 'Warning!!', message)
if (result and (str(text) == 'DELETE')):
try:
sumo = SumoLogic(id, key, endpoint=url, log_level=self.mainwindow.log_level)
for selecteditem in selecteditems:
item_id = selecteditem.details['id']
result = sumo.delete_user(item_id)
self.update_users_and_roles_lists(UserListWidget, RoleListWidget, url, id, key)
return
except Exception as e:
logger.exception(e)
self.mainwindow.errorbox('Something went wrong:\n\n' + str(e))
else:
self.mainwindow.errorbox('You need to select something before you can delete it.')
return
def copy_role(self, RoleListWidgetFrom, RoleListWidgetTo, UserListWidgetTo, fromurl, fromid, fromkey,
tourl, toid, tokey):
logger.info("[Users and Roles]Copying Role(s)")
try:
selecteditems = RoleListWidgetFrom.selectedItems()
if len(selecteditems) > 0: # make sure something was selected
fromsumo = SumoLogic(fromid, fromkey, endpoint=fromurl, log_level=self.mainwindow.log_level)
tosumo = SumoLogic(toid, tokey, endpoint=tourl, log_level=self.mainwindow.log_level)
for selecteditem in selecteditems:
role_id = selecteditem.details['id']
role = fromsumo.get_role(role_id)
status = tosumo.create_role(role)
self.update_users_and_roles_lists(UserListWidgetTo, RoleListWidgetTo, tourl, toid, tokey)
return
else:
self.mainwindow.errorbox('You have not made any selections.')
return
except Exception as e:
logger.exception(e)
self.mainwindow.errorbox('Something went wrong:' + str(e))
self.update_users_and_roles_lists(UserListWidgetTo, RoleListWidgetTo, tourl, toid, tokey)
return
def backup_role(self, RoleListWidget, url, id, key):
logger.info("[Users and Roles]Backing Up Role(s)")
selecteditems = RoleListWidget.selectedItems()
if len(selecteditems) > 0: # make sure something was selected
savepath = str(QtWidgets.QFileDialog.getExistingDirectory(self, "Select Backup Directory"))
if os.access(savepath, os.W_OK):
message = ''
sumo = SumoLogic(id, key, endpoint=url, log_level=self.mainwindow.log_level)
for selecteditem in selecteditems:
item_id = selecteditem.details['id']
try:
export = sumo.get_role(item_id)
savefilepath = pathlib.Path(savepath + r'/' + str(selecteditem.text()) + r'.role.json')
if savefilepath:
with savefilepath.open(mode='w') as filepointer:
json.dump(export, filepointer)
message = message + str(selecteditem.text()) + r'.json' + '\n'
except Exception as e:
logger.exception(e)
self.mainwindow.errorbox('Something went wrong:\n\n' + str(e))
return
self.mainwindow.infobox('Wrote files: \n\n' + message)
else:
self.mainwindow.errorbox("You don't have permissions to write to that directory")
else:
self.mainwindow.errorbox('No content selected.')
return
def view_role_json(self, RoleListWidget, url, id, key):
logger.info("[Users and Roles]Viewing Roles(s) JSON")
selecteditems = RoleListWidget.selectedItems()
if len(selecteditems) > 0: # make sure something was selected
try:
sumo = SumoLogic(id, key, endpoint=url, log_level=self.mainwindow.log_level)
json_text = ''
for selecteditem in selecteditems:
role_id = selecteditem.details['id']
role = sumo.get_role(role_id)
json_text = json_text + json.dumps(role, indent=4, sort_keys=True) + '\n\n'
self.json_window = ShowTextDialog('JSON', json_text, self.mainwindow.basedir)
self.json_window.show()
except Exception as e:
logger.exception(e)
self.mainwindow.errorbox('Something went wrong:\n\n' + str(e))
return
else:
self.mainwindow.errorbox('No role selected.')
return
def restore_role(self, RoleListWidget, UserListWidget, url, id, key):
logger.info("[Users and Roles]Restoring Role(s)")
if RoleListWidget.updated == True:
filter = "JSON (*.json)"
filelist, status = QtWidgets.QFileDialog.getOpenFileNames(self, "Open file(s)...", os.getcwd(),
filter)
if len(filelist) > 0:
sumo = SumoLogic(id, key, endpoint=url, log_level=self.mainwindow.log_level)
for file in filelist:
try:
with open(file) as filepointer:
role_backup = json.load(filepointer)
except Exception as e:
logger.exception(e)
self.mainwindow.errorbox(
"Something went wrong reading the file. Do you have the right file permissions? Does it contain valid JSON?")
return
try:
role_backup['users'] = []
status = sumo.create_role(role_backup)
except Exception as e:
logger.exception(e)
self.mainwindow.errorbox('Something went wrong:\n\n' + str(e))
return
self.update_users_and_roles_lists(UserListWidget, RoleListWidget, url, id, key)
else:
self.mainwindow.errorbox("Please select at least one file to restore.")
return
else:
self.mainwindow.errorbox("Please update the directory list before restoring content")
return
def delete_role(self, RoleListWidget, UserListWidget, url, id, key):
logger.info("[Users and Roles]Deleting Role(s)")
selecteditems = RoleListWidget.selectedItems()
if len(selecteditems) > 0: # make sure something was selected
message = "You are about to delete the following item(s):\n\n"
for selecteditem in selecteditems:
message = message + str(selecteditem.text()) + "\n"
message = message + '''
This is exceedingly DANGEROUS!!!!
Please be VERY, VERY, VERY sure you want to do this!
You could lose quite a bit of work if you delete the wrong thing(s).
If you are absolutely sure, type "DELETE" in the box below.
'''
text, result = QtWidgets.QInputDialog.getText(self, 'Warning!!', message)
if (result and (str(text) == 'DELETE')):
try:
sumo = SumoLogic(id, key, endpoint=url, log_level=self.mainwindow.log_level)
for selecteditem in selecteditems:
item_id = selecteditem.details['id']
result = sumo.delete_role(item_id)
self.update_users_and_roles_lists(UserListWidget, RoleListWidget, url, id, key)
return
except Exception as e:
logger.exception(e)
self.mainwindow.errorbox('Something went wrong:\n\n' + str(e))
else:
self.mainwindow.errorbox('You need to select something before you can delete it.')
return
| 48.340524
| 137
| 0.597966
| 2,929
| 31,373
| 6.298054
| 0.100376
| 0.115358
| 0.071882
| 0.042283
| 0.815363
| 0.80322
| 0.791999
| 0.754703
| 0.728357
| 0.720984
| 0
| 0.000555
| 0.310936
| 31,373
| 648
| 138
| 48.415123
| 0.852762
| 0.021611
| 0
| 0.739209
| 0
| 0.003597
| 0.084395
| 0.00075
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026978
| false
| 0.046763
| 0.016187
| 0
| 0.097122
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0c92ac74a41c6d27584a5d7d7e530d1077058597
| 2,385
|
py
|
Python
|
datasets/few_shot_test_pickle.py
|
PengWan-Yang/few-shot-transformer
|
c055239061744124c72960420cd4037495952b6d
|
[
"Apache-2.0"
] | 4
|
2022-02-06T19:51:19.000Z
|
2022-03-15T21:19:23.000Z
|
datasets/few_shot_test_pickle.py
|
PengWan-Yang/few-shot-transformer
|
c055239061744124c72960420cd4037495952b6d
|
[
"Apache-2.0"
] | 1
|
2022-02-06T20:00:15.000Z
|
2022-02-06T20:00:15.000Z
|
datasets/few_shot_test_pickle.py
|
PengWan-Yang/few-shot-transformer
|
c055239061744124c72960420cd4037495952b6d
|
[
"Apache-2.0"
] | null | null | null |
import pickle
# modify validation data
_few_shot_pickle_file = 'few_shot_test_data.pkl'
_few_shot_file = open(_few_shot_pickle_file, 'rb')
data_few_shot = pickle.load(_few_shot_file)
_few_shot_pickle_file = 'few_shot_val_data.pkl'
_few_shot_file = open(_few_shot_pickle_file, 'rb')
data_val = pickle.load(_few_shot_file)
_few_shot_pickle_file = 'few_shot_train_data.pkl'
_few_shot_file = open(_few_shot_pickle_file, 'rb')
data_train = pickle.load(_few_shot_file)
raise SystemExit('early exit before rewriting the pickle files')  # stops the script here; remove this line to run the rewrites below
for _list in data_few_shot:
for _video in _list:
_video['fg_name'] = _video['fg_name'].replace('/home/tao/dataset/v1-3/train_val_frames_3',
'datasets/activitynet13')
_video['bg_name'] = _video['bg_name'].replace('/home/tao/dataset/v1-3/train_val_frames_3',
'datasets/activitynet13')
pickle.dump(data_few_shot, open(_few_shot_pickle_file, "wb"))
print("done")
# modify testing data
_few_shot_pickle_file = 'few_shot_test_data.pkl'
_few_shot_file = open(_few_shot_pickle_file, 'rb')
data_few_shot = pickle.load(_few_shot_file)
for _list in data_few_shot:
for _video in _list:
_video['fg_name'] = _video['fg_name'].replace('/home/tao/dataset/v1-3/train_val_frames_3',
'datasets/activitynet13')
_video['bg_name'] = _video['bg_name'].replace('/home/tao/dataset/v1-3/train_val_frames_3',
'datasets/activitynet13')
pickle.dump(data_few_shot, open(_few_shot_pickle_file, "wb"))
print("done")
# modify training data
_few_shot_pickle_file = 'few_shot_train_data.pkl'
_few_shot_file = open(_few_shot_pickle_file, 'rb')
data_few_shot = pickle.load(_few_shot_file)
index = 0
for k, _list in data_few_shot.items():
for _video in _list:
_video['video_id'] = "query_{:0>5d}".format(index)
_video['fg_name'] = _video['fg_name'].replace('dataset/activitynet13/train_val_frames_3',
'datasets/activitynet13')
_video['bg_name'] = _video['bg_name'].replace('dataset/activitynet13/train_val_frames_3',
'datasets/activitynet13')
index = index + 1
pickle.dump(data_few_shot, open(_few_shot_pickle_file, "wb"))
print("done")
| 37.857143
| 98
| 0.65283
| 327
| 2,385
| 4.238532
| 0.143731
| 0.186869
| 0.150072
| 0.159452
| 0.919192
| 0.878066
| 0.878066
| 0.862193
| 0.862193
| 0.862193
| 0
| 0.019105
| 0.231866
| 2,385
| 62
| 99
| 38.467742
| 0.737445
| 0.026415
| 0
| 0.75
| 0
| 0
| 0.267703
| 0.210276
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.022727
| 0
| 0.022727
| 0.068182
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ca19eadb115712fb3c48ed0a589480fef063fda
| 27,687
|
py
|
Python
|
tests/test_home.py
|
jeroenterheerdt/nexia
|
93ff554913e1dad6389b54179eca7c4ec1f29371
|
[
"Apache-2.0"
] | null | null | null |
tests/test_home.py
|
jeroenterheerdt/nexia
|
93ff554913e1dad6389b54179eca7c4ec1f29371
|
[
"Apache-2.0"
] | null | null | null |
tests/test_home.py
|
jeroenterheerdt/nexia
|
93ff554913e1dad6389b54179eca7c4ec1f29371
|
[
"Apache-2.0"
] | null | null | null |
"""Tests for Nexia Home."""
import json
import os
from os.path import dirname
import unittest
import pytest
from nexia.home import NexiaHome
def load_fixture(filename):
"""Load a fixture."""
test_dir = dirname(__file__)
path = os.path.join(test_dir, "fixtures", filename)
with open(path) as fptr:
return fptr.read()
class TestNexiaThermostat(unittest.TestCase):
"""Tests for nexia thermostat."""
def test_update(self):
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_houses_123456.json"))
nexia.update_from_json(devices_json)
thermostat = nexia.get_thermostat_by_id(2059661)
zone_ids = thermostat.get_zone_ids()
self.assertEqual(zone_ids, [83261002, 83261005, 83261008, 83261011])
nexia.update_from_json(devices_json)
zone_ids = thermostat.get_zone_ids()
self.assertEqual(zone_ids, [83261002, 83261005, 83261008, 83261011])
nexia.update_from_json(devices_json)
def test_idle_thermo(self):
"""Get methods for an idle thermostat."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_houses_123456.json"))
nexia.update_from_json(devices_json)
thermostat = nexia.get_thermostat_by_id(2059661)
self.assertEqual(thermostat.get_model(), "XL1050")
self.assertEqual(thermostat.get_firmware(), "5.9.1")
self.assertEqual(thermostat.get_dev_build_number(), "1581321824")
self.assertEqual(thermostat.get_device_id(), "000000")
self.assertEqual(thermostat.get_type(), "XL1050")
self.assertEqual(thermostat.get_name(), "Downstairs East Wing")
self.assertEqual(thermostat.get_deadband(), 3)
self.assertEqual(thermostat.get_setpoint_limits(), (55, 99))
self.assertEqual(thermostat.get_variable_fan_speed_limits(), (0.35, 1.0))
self.assertEqual(thermostat.get_unit(), "F")
self.assertEqual(thermostat.get_humidity_setpoint_limits(), (0.35, 0.65))
self.assertEqual(thermostat.get_fan_mode(), "Auto")
self.assertEqual(thermostat.get_fan_modes(), ["Auto", "On", "Circulate"])
self.assertEqual(thermostat.get_outdoor_temperature(), 88.0)
self.assertEqual(thermostat.get_relative_humidity(), 0.36)
self.assertEqual(thermostat.get_current_compressor_speed(), 0.0)
self.assertEqual(thermostat.get_requested_compressor_speed(), 0.0)
self.assertEqual(thermostat.get_fan_speed_setpoint(), 0.35)
self.assertEqual(thermostat.get_dehumidify_setpoint(), 0.50)
self.assertEqual(thermostat.has_dehumidify_support(), True)
self.assertEqual(thermostat.has_dehumidify_support(), True)
self.assertEqual(thermostat.has_emergency_heat(), False)
self.assertEqual(thermostat.get_system_status(), "System Idle")
self.assertEqual(thermostat.has_air_cleaner(), True)
self.assertEqual(thermostat.get_air_cleaner_mode(), "auto")
self.assertEqual(thermostat.is_blower_active(), False)
zone_ids = thermostat.get_zone_ids()
self.assertEqual(zone_ids, [83261002, 83261005, 83261008, 83261011])
def test_idle_thermo_issue_33758(self):
"""Get methods for an idle thermostat."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_house_issue_33758.json"))
nexia.update_from_json(devices_json)
thermostat = nexia.get_thermostat_by_id(12345678)
self.assertEqual(thermostat.get_model(), "XL1050")
self.assertEqual(thermostat.get_firmware(), "5.9.1")
self.assertEqual(thermostat.get_dev_build_number(), "1581321824")
self.assertEqual(thermostat.get_device_id(), "xxxxxx")
self.assertEqual(thermostat.get_type(), "XL1050")
self.assertEqual(thermostat.get_name(), "Thermostat")
self.assertEqual(thermostat.get_deadband(), 3)
self.assertEqual(thermostat.get_setpoint_limits(), (55, 99))
self.assertEqual(thermostat.get_variable_fan_speed_limits(), (0.35, 1.0))
self.assertEqual(thermostat.get_unit(), "F")
self.assertEqual(thermostat.get_humidity_setpoint_limits(), (0.35, 0.65))
self.assertEqual(thermostat.get_fan_mode(), "Auto")
self.assertEqual(thermostat.get_fan_modes(), ["Auto", "On", "Circulate"])
self.assertEqual(thermostat.get_outdoor_temperature(), 55.0)
self.assertEqual(thermostat.get_relative_humidity(), 0.43)
self.assertEqual(thermostat.get_current_compressor_speed(), 0.0)
self.assertEqual(thermostat.get_requested_compressor_speed(), 0.0)
self.assertEqual(thermostat.get_fan_speed_setpoint(), 1)
self.assertEqual(thermostat.get_dehumidify_setpoint(), 0.55)
self.assertEqual(thermostat.has_dehumidify_support(), True)
self.assertEqual(thermostat.has_humidify_support(), True)
self.assertEqual(thermostat.has_emergency_heat(), True)
self.assertEqual(thermostat.is_emergency_heat_active(), False)
self.assertEqual(thermostat.get_system_status(), "System Idle")
self.assertEqual(thermostat.has_air_cleaner(), True)
self.assertEqual(thermostat.get_air_cleaner_mode(), "auto")
self.assertEqual(thermostat.is_blower_active(), False)
zone_ids = thermostat.get_zone_ids()
self.assertEqual(zone_ids, [12345678])
def test_idle_thermo_issue_33968_thermostat_1690380(self):
"""Get methods for an cooling thermostat."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_house_issue_33968.json"))
nexia.update_from_json(devices_json)
thermostat_ids = nexia.get_thermostat_ids()
self.assertEqual(thermostat_ids, [1690380])
thermostat = nexia.get_thermostat_by_id(1690380)
zone_ids = thermostat.get_zone_ids()
self.assertEqual(zone_ids, [83037337, 83037340, 83037343])
self.assertEqual(thermostat.get_model(), "XL1050")
self.assertEqual(thermostat.get_firmware(), "5.9.1")
self.assertEqual(thermostat.get_dev_build_number(), "1581321824")
self.assertEqual(thermostat.get_device_id(), "removed")
self.assertEqual(thermostat.get_type(), "XL1050")
self.assertEqual(thermostat.get_name(), "Thermostat")
self.assertEqual(thermostat.get_deadband(), 3)
self.assertEqual(thermostat.get_setpoint_limits(), (55, 99))
self.assertEqual(thermostat.get_variable_fan_speed_limits(), (0.35, 1.0))
self.assertEqual(thermostat.get_unit(), "F")
self.assertEqual(thermostat.get_humidity_setpoint_limits(), (0.35, 0.65))
self.assertEqual(thermostat.get_fan_mode(), "Auto")
self.assertEqual(thermostat.get_fan_modes(), ["Auto", "On", "Circulate"])
self.assertEqual(thermostat.get_outdoor_temperature(), 80.0)
self.assertEqual(thermostat.get_relative_humidity(), 0.55)
self.assertEqual(thermostat.get_current_compressor_speed(), 0.41)
self.assertEqual(thermostat.get_requested_compressor_speed(), 0.41)
self.assertEqual(thermostat.get_fan_speed_setpoint(), 0.5)
self.assertEqual(thermostat.get_dehumidify_setpoint(), 0.55)
self.assertEqual(thermostat.has_dehumidify_support(), True)
self.assertEqual(thermostat.has_humidify_support(), False)
self.assertEqual(thermostat.has_emergency_heat(), True)
self.assertEqual(thermostat.is_emergency_heat_active(), False)
self.assertEqual(thermostat.get_system_status(), "Cooling")
self.assertEqual(thermostat.has_air_cleaner(), True)
self.assertEqual(thermostat.get_air_cleaner_mode(), "auto")
self.assertEqual(thermostat.is_blower_active(), True)
def test_active_thermo(self):
"""Get methods for an active thermostat."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_houses_123456.json"))
nexia.update_from_json(devices_json)
thermostat = nexia.get_thermostat_by_id(2293892)
self.assertEqual(thermostat.get_model(), "XL1050")
self.assertEqual(thermostat.get_firmware(), "5.9.1")
self.assertEqual(thermostat.get_dev_build_number(), "1581321824")
self.assertEqual(thermostat.get_device_id(), "0281B02C")
self.assertEqual(thermostat.get_type(), "XL1050")
self.assertEqual(thermostat.get_name(), "Master Suite")
self.assertEqual(thermostat.get_deadband(), 3)
self.assertEqual(thermostat.get_setpoint_limits(), (55, 99))
self.assertEqual(thermostat.get_variable_fan_speed_limits(), (0.35, 1.0))
self.assertEqual(thermostat.get_unit(), "F")
self.assertEqual(thermostat.get_humidity_setpoint_limits(), (0.35, 0.65))
self.assertEqual(thermostat.get_fan_mode(), "Auto")
self.assertEqual(thermostat.get_fan_modes(), ["Auto", "On", "Circulate"])
self.assertEqual(thermostat.get_outdoor_temperature(), 87.0)
self.assertEqual(thermostat.get_relative_humidity(), 0.52)
self.assertEqual(thermostat.get_current_compressor_speed(), 0.69)
self.assertEqual(thermostat.get_requested_compressor_speed(), 0.69)
self.assertEqual(thermostat.get_fan_speed_setpoint(), 0.35)
self.assertEqual(thermostat.get_dehumidify_setpoint(), 0.45)
self.assertEqual(thermostat.has_dehumidify_support(), True)
self.assertEqual(thermostat.has_humidify_support(), False)
self.assertEqual(thermostat.has_emergency_heat(), False)
self.assertEqual(thermostat.get_system_status(), "Cooling")
self.assertEqual(thermostat.has_air_cleaner(), True)
self.assertEqual(thermostat.get_air_cleaner_mode(), "auto")
self.assertEqual(thermostat.is_blower_active(), True)
zone_ids = thermostat.get_zone_ids()
self.assertEqual(zone_ids, [83394133, 83394130, 83394136, 83394127, 83394139])
@pytest.mark.skip(reason="not yet supported")
def test_xl624(self):
"""Get methods for an xl624 thermostat."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_house_xl624.json"))
nexia.update_from_json(devices_json)
thermostat_ids = nexia.get_thermostat_ids()
self.assertEqual(thermostat_ids, [2222222, 3333333])
thermostat = nexia.get_thermostat_by_id(1111111)
self.assertEqual(thermostat.get_model(), None)
self.assertEqual(thermostat.get_firmware(), "2.8")
self.assertEqual(thermostat.get_dev_build_number(), "0603340208")
self.assertEqual(thermostat.get_device_id(), None)
self.assertEqual(thermostat.get_type(), None)
self.assertEqual(thermostat.get_name(), "Downstairs Hall")
self.assertEqual(thermostat.get_deadband(), 3)
self.assertEqual(thermostat.get_setpoint_limits(), (55, 99))
self.assertEqual(thermostat.has_variable_fan_speed(), False)
self.assertEqual(thermostat.get_unit(), "F")
self.assertEqual(thermostat.get_humidity_setpoint_limits(), (0.35, 0.65))
self.assertEqual(thermostat.get_fan_mode(), "Auto")
self.assertEqual(thermostat.get_fan_modes(), ["Auto", "On", "Cycler"])
self.assertEqual(thermostat.get_current_compressor_speed(), 0.0)
self.assertEqual(thermostat.get_requested_compressor_speed(), 0.0)
self.assertEqual(thermostat.has_dehumidify_support(), False)
self.assertEqual(thermostat.has_humidify_support(), False)
self.assertEqual(thermostat.has_emergency_heat(), False)
self.assertEqual(thermostat.get_system_status(), "System Idle")
self.assertEqual(thermostat.has_air_cleaner(), False)
self.assertEqual(thermostat.is_blower_active(), False)
zone_ids = thermostat.get_zone_ids()
self.assertEqual(zone_ids, [12345678])
def test_xl824_1(self):
"""Get methods for an xl824 thermostat."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_house_xl624.json"))
nexia.update_from_json(devices_json)
thermostat_ids = nexia.get_thermostat_ids()
self.assertEqual(thermostat_ids, [2222222, 3333333])
thermostat = nexia.get_thermostat_by_id(2222222)
self.assertEqual(thermostat.get_model(), "XL824")
self.assertEqual(thermostat.get_firmware(), "5.9.1")
self.assertEqual(thermostat.get_dev_build_number(), "1581314625")
self.assertEqual(thermostat.get_device_id(), "0167CA48")
self.assertEqual(thermostat.get_type(), "XL824")
self.assertEqual(thermostat.get_name(), "Family Room")
self.assertEqual(thermostat.get_deadband(), 3)
self.assertEqual(thermostat.get_setpoint_limits(), (55, 99))
self.assertEqual(thermostat.has_variable_fan_speed(), True)
self.assertEqual(thermostat.get_unit(), "F")
self.assertEqual(thermostat.get_humidity_setpoint_limits(), (0.35, 0.65))
self.assertEqual(thermostat.get_fan_mode(), "Circulate")
self.assertEqual(thermostat.get_fan_modes(), ["Auto", "On", "Circulate"])
self.assertEqual(thermostat.get_current_compressor_speed(), 0.0)
self.assertEqual(thermostat.get_requested_compressor_speed(), 0.0)
self.assertEqual(thermostat.has_dehumidify_support(), True)
self.assertEqual(thermostat.has_humidify_support(), False)
self.assertEqual(thermostat.has_emergency_heat(), False)
self.assertEqual(thermostat.get_system_status(), "System Idle")
self.assertEqual(thermostat.has_air_cleaner(), True)
self.assertEqual(thermostat.is_blower_active(), False)
zone_ids = thermostat.get_zone_ids()
self.assertEqual(zone_ids, [88888888])
def test_xl824_2(self):
"""Get methods for an xl824 thermostat."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_house_xl624.json"))
nexia.update_from_json(devices_json)
thermostat_ids = nexia.get_thermostat_ids()
self.assertEqual(thermostat_ids, [2222222, 3333333])
thermostat = nexia.get_thermostat_by_id(3333333)
self.assertEqual(thermostat.get_model(), "XL824")
self.assertEqual(thermostat.get_firmware(), "5.9.1")
self.assertEqual(thermostat.get_dev_build_number(), "1581314625")
self.assertEqual(thermostat.get_device_id(), "01573380")
self.assertEqual(thermostat.get_type(), "XL824")
self.assertEqual(thermostat.get_name(), "Upstairs")
self.assertEqual(thermostat.get_deadband(), 3)
self.assertEqual(thermostat.get_setpoint_limits(), (55, 99))
self.assertEqual(thermostat.has_variable_fan_speed(), True)
self.assertEqual(thermostat.get_unit(), "F")
self.assertEqual(thermostat.get_humidity_setpoint_limits(), (0.35, 0.65))
self.assertEqual(thermostat.get_fan_mode(), "Circulate")
self.assertEqual(thermostat.get_fan_modes(), ["Auto", "On", "Circulate"])
self.assertEqual(thermostat.get_current_compressor_speed(), 0.0)
self.assertEqual(thermostat.get_requested_compressor_speed(), 0.0)
self.assertEqual(thermostat.has_dehumidify_support(), True)
self.assertEqual(thermostat.has_humidify_support(), False)
self.assertEqual(thermostat.has_emergency_heat(), False)
self.assertEqual(thermostat.get_system_status(), "System Idle")
self.assertEqual(thermostat.has_air_cleaner(), True)
self.assertEqual(thermostat.is_blower_active(), False)
zone_ids = thermostat.get_zone_ids()
self.assertEqual(zone_ids, [99999999])
class TestNexiaHome(unittest.TestCase):
"""Tests for nexia home."""
def test_basic(self):
"""Basic tests for NexiaHome."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_houses_123456.json"))
nexia.update_from_json(devices_json)
self.assertEqual(nexia.get_name(), "Hidden")
thermostat_ids = nexia.get_thermostat_ids()
self.assertEqual(thermostat_ids, [2059661, 2059676, 2293892, 2059652])
def test_basic_issue_33758(self):
"""Basic tests for NexiaHome."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_house_issue_33758.json"))
nexia.update_from_json(devices_json)
self.assertEqual(nexia.get_name(), "Hidden")
thermostat_ids = nexia.get_thermostat_ids()
self.assertEqual(thermostat_ids, [12345678])
class TestNexiaThermostatZone(unittest.TestCase):
"""Tests for nexia thermostat zone."""
def test_zone_issue_33968_zone_83037337(self):
"""Tests for nexia thermostat zone that is cooling."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_house_issue_33968.json"))
nexia.update_from_json(devices_json)
thermostat = nexia.get_thermostat_by_id(1690380)
zone = thermostat.get_zone_by_id(83037337)
self.assertEqual(zone.thermostat, thermostat)
self.assertEqual(zone.get_name(), "Family Room")
self.assertEqual(zone.get_cooling_setpoint(), 77)
self.assertEqual(zone.get_heating_setpoint(), 74)
self.assertEqual(zone.get_current_mode(), "COOL")
self.assertEqual(
zone.get_requested_mode(), "COOL",
)
self.assertEqual(
zone.get_presets(), ["None", "Home", "Away", "Sleep"],
)
self.assertEqual(
zone.get_preset(), "None",
)
self.assertEqual(
zone.get_status(), "Damper Closed",
)
self.assertEqual(
zone.get_setpoint_status(), "Permanent Hold",
)
self.assertEqual(zone.is_calling(), False)
self.assertEqual(zone.is_in_permanent_hold(), True)
def test_zone_issue_33968_zone_83037340(self):
"""Tests for nexia thermostat zone that is cooling."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_house_issue_33968.json"))
nexia.update_from_json(devices_json)
thermostat = nexia.get_thermostat_by_id(1690380)
zone = thermostat.get_zone_by_id(83037340)
self.assertEqual(zone.thermostat, thermostat)
self.assertEqual(zone.get_name(), "Office")
self.assertEqual(zone.get_cooling_setpoint(), 77)
self.assertEqual(zone.get_heating_setpoint(), 74)
self.assertEqual(zone.get_current_mode(), "COOL")
self.assertEqual(
zone.get_requested_mode(), "COOL",
)
self.assertEqual(
zone.get_presets(), ["None", "Home", "Away", "Sleep"],
)
self.assertEqual(
zone.get_preset(), "None",
)
self.assertEqual(
zone.get_status(), "Damper Open",
)
self.assertEqual(
zone.get_setpoint_status(), "Permanent Hold",
)
self.assertEqual(zone.is_calling(), True)
self.assertEqual(zone.is_in_permanent_hold(), True)
def test_zone_issue_33968_zone_83037343(self):
"""Tests for nexia thermostat zone that is cooling."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_house_issue_33968.json"))
nexia.update_from_json(devices_json)
thermostat = nexia.get_thermostat_by_id(1690380)
zone = thermostat.get_zone_by_id(83037343)
self.assertEqual(zone.thermostat, thermostat)
self.assertEqual(zone.get_name(), "Master")
self.assertEqual(zone.get_cooling_setpoint(), 77)
self.assertEqual(zone.get_heating_setpoint(), 68)
self.assertEqual(zone.get_current_mode(), "COOL")
self.assertEqual(
zone.get_requested_mode(), "COOL",
)
self.assertEqual(
zone.get_presets(), ["None", "Home", "Away", "Sleep"],
)
self.assertEqual(
zone.get_preset(), "None",
)
self.assertEqual(
zone.get_status(), "Damper Open",
)
self.assertEqual(
zone.get_setpoint_status(), "Permanent Hold",
)
self.assertEqual(zone.is_calling(), True)
self.assertEqual(zone.is_in_permanent_hold(), True)
def test_zone_issue_33758(self):
"""Tests for nexia thermostat zone relieving air."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_house_issue_33758.json"))
nexia.update_from_json(devices_json)
thermostat = nexia.get_thermostat_by_id(12345678)
zone = thermostat.get_zone_by_id(12345678)
self.assertEqual(zone.thermostat, thermostat)
self.assertEqual(zone.get_name(), "Thermostat NativeZone")
self.assertEqual(zone.get_cooling_setpoint(), 73)
self.assertEqual(zone.get_heating_setpoint(), 68)
self.assertEqual(zone.get_current_mode(), "AUTO")
self.assertEqual(
zone.get_requested_mode(), "AUTO",
)
self.assertEqual(
zone.get_presets(), ["None", "Home", "Away", "Sleep"],
)
self.assertEqual(
zone.get_preset(), "None",
)
self.assertEqual(
zone.get_status(), "Idle",
)
self.assertEqual(
zone.get_setpoint_status(), "Run Schedule - None",
)
self.assertEqual(zone.is_calling(), False)
self.assertEqual(zone.is_in_permanent_hold(), False)
def test_zone_relieving_air(self):
"""Tests for nexia thermostat zone relieving air."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_houses_123456.json"))
nexia.update_from_json(devices_json)
thermostat = nexia.get_thermostat_by_id(2293892)
zone = thermostat.get_zone_by_id(83394133)
self.assertEqual(zone.thermostat, thermostat)
self.assertEqual(zone.get_name(), "Bath Closet")
self.assertEqual(zone.get_cooling_setpoint(), 79)
self.assertEqual(zone.get_heating_setpoint(), 63)
self.assertEqual(zone.get_current_mode(), "AUTO")
self.assertEqual(
zone.get_requested_mode(), "AUTO",
)
self.assertEqual(
zone.get_presets(), ["None", "Home", "Away", "Sleep"],
)
self.assertEqual(
zone.get_preset(), "None",
)
self.assertEqual(
zone.get_status(), "Relieving Air",
)
self.assertEqual(
zone.get_setpoint_status(), "Permanent Hold",
)
self.assertEqual(zone.is_calling(), True)
self.assertEqual(zone.is_in_permanent_hold(), True)
def test_zone_cooling_air(self):
"""Tests for nexia thermostat zone cooling."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_houses_123456.json"))
nexia.update_from_json(devices_json)
thermostat = nexia.get_thermostat_by_id(2293892)
zone = thermostat.get_zone_by_id(83394130)
self.assertEqual(zone.get_name(), "Master")
self.assertEqual(zone.get_cooling_setpoint(), 71)
self.assertEqual(zone.get_heating_setpoint(), 63)
self.assertEqual(zone.get_current_mode(), "AUTO")
self.assertEqual(
zone.get_requested_mode(), "AUTO",
)
self.assertEqual(
zone.get_presets(), ["None", "Home", "Away", "Sleep"],
)
self.assertEqual(
zone.get_preset(), "None",
)
self.assertEqual(
zone.get_status(), "Damper Open",
)
self.assertEqual(
zone.get_setpoint_status(), "Permanent Hold",
)
self.assertEqual(zone.is_calling(), True)
self.assertEqual(zone.is_in_permanent_hold(), True)
def test_zone_idle(self):
"""Tests for nexia thermostat zone idle."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_houses_123456.json"))
nexia.update_from_json(devices_json)
thermostat = nexia.get_thermostat_by_id(2059661)
zone = thermostat.get_zone_by_id(83261002)
self.assertEqual(zone.get_name(), "Living East")
self.assertEqual(zone.get_cooling_setpoint(), 79)
self.assertEqual(zone.get_heating_setpoint(), 63)
self.assertEqual(zone.get_current_mode(), "AUTO")
self.assertEqual(
zone.get_requested_mode(), "AUTO",
)
self.assertEqual(
zone.get_presets(), ["None", "Home", "Away", "Sleep"],
)
self.assertEqual(
zone.get_preset(), "None",
)
self.assertEqual(
zone.get_status(), "Idle",
)
self.assertEqual(
zone.get_setpoint_status(), "Permanent Hold",
)
self.assertEqual(zone.is_calling(), False)
self.assertEqual(zone.is_in_permanent_hold(), True)
def test_xl824_idle(self):
"""Tests for nexia xl824 zone idle."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_house_xl624.json"))
nexia.update_from_json(devices_json)
thermostat_ids = nexia.get_thermostat_ids()
self.assertEqual(thermostat_ids, [2222222, 3333333])
thermostat = nexia.get_thermostat_by_id(3333333)
zone = thermostat.get_zone_by_id(99999999)
self.assertEqual(zone.get_name(), "Upstairs NativeZone")
self.assertEqual(zone.get_cooling_setpoint(), 74)
self.assertEqual(zone.get_heating_setpoint(), 62)
self.assertEqual(zone.get_current_mode(), "COOL")
self.assertEqual(
zone.get_requested_mode(), "COOL",
)
self.assertEqual(
zone.get_presets(), ["None", "Home", "Away", "Sleep"],
)
self.assertEqual(
zone.get_preset(), "None",
)
self.assertEqual(
zone.get_status(), "Idle",
)
self.assertEqual(
zone.get_setpoint_status(), "Permanent Hold",
)
self.assertEqual(zone.is_calling(), False)
self.assertEqual(zone.is_in_permanent_hold(), True)
class TestNexiaAutomation(unittest.TestCase):
def test_automations(self):
"""Get methods for an active thermostat."""
nexia = NexiaHome(auto_login=False)
devices_json = json.loads(load_fixture("mobile_houses_123456.json"))
nexia.update_from_json(devices_json)
automation_ids = nexia.get_automation_ids()
self.assertEqual(
automation_ids,
[3467876, 3467870, 3452469, 3452472, 3454776, 3454774, 3486078, 3486091],
)
automation_one = nexia.get_automation_by_id(3467876)
self.assertEqual(automation_one.name, "Away for 12 Hours")
self.assertEqual(
automation_one.description,
"When IFTTT activates the automation Upstairs West Wing will "
"permanently hold the heat to 62.0 and cool to 83.0 AND "
"Downstairs East Wing will permanently hold the heat to 62.0 "
"and cool to 83.0 AND Downstairs West Wing will permanently "
"hold the heat to 62.0 and cool to 83.0 AND Activate the mode "
"named 'Away 12' AND Master Suite will permanently hold the "
"heat to 62.0 and cool to 83.0",
)
self.assertEqual(automation_one.enabled, True)
self.assertEqual(automation_one.automation_id, 3467876)
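
# Not part of the original module: a minimal sketch for running these TestCase classes
# directly with the standard-library runner.  Note that the ``@pytest.mark.skip`` marker
# on test_xl624 is only honoured under pytest; the plain unittest runner below would
# attempt that test as well.
if __name__ == "__main__":
    unittest.main()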
| 44.946429
| 86
| 0.674757
| 3,182
| 27,687
| 5.599309
| 0.073224
| 0.239939
| 0.246955
| 0.202728
| 0.920189
| 0.902565
| 0.882079
| 0.872369
| 0.84498
| 0.84498
| 0
| 0.050902
| 0.205295
| 27,687
| 615
| 87
| 45.019512
| 0.758851
| 0.029797
| 0
| 0.688109
| 0
| 0
| 0.073843
| 0.018507
| 0
| 0
| 0
| 0
| 0.555556
| 1
| 0.038986
| false
| 0
| 0.011696
| 0
| 0.060429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0ca3cc7e85961f379dcec8f7f5d9db60fd5df51d
| 138,423
|
py
|
Python
|
dlkit/abstract_osid/calendaring/queries.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/abstract_osid/calendaring/queries.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/abstract_osid/calendaring/queries.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""Implementations of calendaring abstract base class queries."""
# pylint: disable=invalid-name
# Method names comply with OSID specification.
# pylint: disable=no-init
# Abstract classes do not define __init__.
# pylint: disable=too-few-public-methods
# Some interfaces are specified as 'markers' and include no methods.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
# pylint: disable=too-many-arguments
# Argument signature defined in specification.
# pylint: disable=duplicate-code
# All apparent duplicates have been inspected. They aren't.
import abc
class EventQuery:
"""This is the query for searching events.
Each method match request produces an ``AND`` term while multiple
    invocations of a method produce a nested ``OR``.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def match_implicit(self, match):
"""Matches an event that is implicitly generated.
:param match: ``true`` to match events implicitly generated, ``false`` to match events explicitly defined
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_implicit_terms(self):
"""Clears the implcit terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
implicit_terms = property(fdel=clear_implicit_terms)
@abc.abstractmethod
def match_duration(self, low, high, match):
"""Matches the event duration between the given range inclusive.
:param low: low duration range
:type low: ``osid.calendaring.Duration``
:param high: high duration range
:type high: ``osid.calendaring.Duration``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``high`` is less than ``low``
:raise: ``NullArgument`` -- ``high`` or ``low`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_duration(self, match):
"""Matches an event that has any duration.
:param match: ``true`` to match events with any duration, ``false`` to match events with no start time
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_duration_terms(self):
"""Clears the duration terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
duration_terms = property(fdel=clear_duration_terms)
@abc.abstractmethod
def match_recurring_event_id(self, recurring_event_id, match):
"""Matches events that related to the recurring event.
:param recurring_event_id: an ``Id`` for a recurring event
:type recurring_event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``recurring_event_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_recurring_event_id_terms(self):
"""Clears the recurring event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
recurring_event_id_terms = property(fdel=clear_recurring_event_id_terms)
@abc.abstractmethod
def supports_recurring_event_query(self):
"""Tests if a ``RecurringEventQuery`` is available for querying recurring events.
:return: ``true`` if a recurring event query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_recurring_event_query(self):
"""Gets the query for a recurring event.
Multiple retrievals produce a nested ``OR`` term.
:return: the recurring event query
:rtype: ``osid.calendaring.RecurringEventQuery``
:raise: ``Unimplemented`` -- ``supports_recurring_event_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_recurring_event_query()`` is ``true``.*
"""
return # osid.calendaring.RecurringEventQuery
recurring_event_query = property(fget=get_recurring_event_query)
@abc.abstractmethod
def match_any_recurring_event(self, match):
"""Matches an event that is part of any recurring event.
:param match: ``true`` to match events part of any recurring event, ``false`` to match only standalone events
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_recurring_event_terms(self):
"""Clears the recurring event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
recurring_event_terms = property(fdel=clear_recurring_event_terms)
@abc.abstractmethod
def match_superseding_event_id(self, superseding_event_id, match):
"""Matches events that relate to the superseding event.
:param superseding_event_id: an ``Id`` for a superseding event
:type superseding_event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_superseding_event_id_terms(self):
"""Clears the superseding events type terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
superseding_event_id_terms = property(fdel=clear_superseding_event_id_terms)
@abc.abstractmethod
def supports_superseding_event_query(self):
"""Tests if a ``SupersedingEventQuery`` is available for querying offset events.
:return: ``true`` if a superseding event query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_superseding_event_query(self):
"""Gets the query for a superseding event.
Multiple retrievals produce a nested ``OR`` term.
:return: the superseding event query
:rtype: ``osid.calendaring.SupersedingEventQuery``
:raise: ``Unimplemented`` -- ``supports_superseding_event_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_superseding_event_query()`` is ``true``.*
"""
return # osid.calendaring.SupersedingEventQuery
superseding_event_query = property(fget=get_superseding_event_query)
@abc.abstractmethod
def match_any_superseding_event(self, match):
"""Matches any superseding event.
:param match: ``true`` to match any superseding events, ``false`` otherwise
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_superseding_event_terms(self):
"""Clears the superseding event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
superseding_event_terms = property(fdel=clear_superseding_event_terms)
@abc.abstractmethod
def match_offset_event_id(self, offset_event_id, match):
"""Matches events that relates to the offset event.
:param offset_event_id: an ``Id`` for an offset event
:type offset_event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_offset_event_id_terms(self):
"""Clears the recurring events type terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
offset_event_id_terms = property(fdel=clear_offset_event_id_terms)
@abc.abstractmethod
def supports_offset_event_query(self):
"""Tests if an ``OffsetEventQuery`` is available for querying offset events.
:return: ``true`` if an offset event query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_offset_event_query(self):
"""Gets the query for an offset event.
Multiple retrievals produce a nested ``OR`` term.
:return: the offset event query
:rtype: ``osid.calendaring.OffsetEventQuery``
:raise: ``Unimplemented`` -- ``supports_offset_event_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_offset_event_query()`` is ``true``.*
"""
return # osid.calendaring.OffsetEventQuery
offset_event_query = property(fget=get_offset_event_query)
@abc.abstractmethod
def match_any_offset_event(self, match):
"""Matches any offset event.
:param match: ``true`` to match any offset events, ``false`` otherwise
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_offset_event_terms(self):
"""Clears the offset event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
offset_event_terms = property(fdel=clear_offset_event_terms)
@abc.abstractmethod
def match_location_description(self, location, string_match_type, match):
"""Matches the location description string.
:param location: location string
:type location: ``string``
:param string_match_type: string match type
:type string_match_type: ``osid.type.Type``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``location`` is not of ``string_match_type``
:raise: ``NullArgument`` -- ``location`` or ``string_match_type`` is ``null``
:raise: ``Unsupported`` -- ``supports_string_match_type(string_match_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_location_description(self, match):
"""Matches an event that has any location description assigned.
:param match: ``true`` to match events with any location description, ``false`` to match events with no location
description
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_location_description_terms(self):
"""Clears the location description terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
location_description_terms = property(fdel=clear_location_description_terms)
@abc.abstractmethod
def match_location_id(self, location_id, match):
"""Sets the location ``Id`` for this query.
:param location_id: a location ``Id``
:type location_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``location_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_location_id_terms(self):
"""Clears the location ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
location_id_terms = property(fdel=clear_location_id_terms)
@abc.abstractmethod
def supports_location_query(self):
"""Tests if a ``LocationQuery`` is available for querying locations.
:return: ``true`` if a location query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_location_query(self):
"""Gets the query for a location.
Multiple retrievals produce a nested ``OR`` term.
:return: the location query
:rtype: ``osid.mapping.LocationQuery``
:raise: ``Unimplemented`` -- ``supports_location_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_location_query()`` is ``true``.*
"""
return # osid.mapping.LocationQuery
location_query = property(fget=get_location_query)
@abc.abstractmethod
def match_any_location(self, match):
"""Matches an event that has any location assigned.
:param match: ``true`` to match events with any location, ``false`` to match events with no location
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_location_terms(self):
"""Clears the location terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
location_terms = property(fdel=clear_location_terms)
@abc.abstractmethod
def match_sponsor_id(self, sponsor_id, match):
"""Sets the sponsor ``Id`` for this query.
:param sponsor_id: a sponsor ``Id``
:type sponsor_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``sponsor_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_sponsor_id_terms(self):
"""Clears the sponsor ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
sponsor_id_terms = property(fdel=clear_sponsor_id_terms)
@abc.abstractmethod
def supports_sponsor_query(self):
"""Tests if a ``LocationQuery`` is available for querying sponsors.
:return: ``true`` if a sponsor query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_sponsor_query(self):
"""Gets the query for a sponsor.
Multiple retrievals produce a nested ``OR`` term.
:return: the sponsor query
:rtype: ``osid.resource.ResourceQuery``
:raise: ``Unimplemented`` -- ``supports_sponsor_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_sponsor_query()`` is ``true``.*
"""
return # osid.resource.ResourceQuery
sponsor_query = property(fget=get_sponsor_query)
@abc.abstractmethod
def clear_sponsor_terms(self):
"""Clears the sponsor terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
sponsor_terms = property(fdel=clear_sponsor_terms)
@abc.abstractmethod
def match_coordinate(self, coordinate, match):
"""Matches events whose locations contain the given coordinate.
:param coordinate: a coordinate
:type coordinate: ``osid.mapping.Coordinate``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``coordinate`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_coordinate_terms(self):
"""Clears the cooordinate terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
coordinate_terms = property(fdel=clear_coordinate_terms)
@abc.abstractmethod
def match_spatial_unit(self, spatial_unit, match):
"""Matches events whose locations fall within the given spatial unit.
:param spatial_unit: a spatial unit
:type spatial_unit: ``osid.mapping.SpatialUnit``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``spatial_unit`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_spatial_unit_terms(self):
"""Clears the spatial unit terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
spatial_unit_terms = property(fdel=clear_spatial_unit_terms)
@abc.abstractmethod
def match_commitment_id(self, commitment_id, match):
"""Sets the commitment ``Id`` for this query.
:param commitment_id: a commitment ``Id``
:type commitment_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``commitment_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_commitment_id_terms(self):
"""Clears the commitment ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
commitment_id_terms = property(fdel=clear_commitment_id_terms)
@abc.abstractmethod
def supports_commitment_query(self):
"""Tests if a ``CommitmentQuery`` is available for querying recurring event terms.
:return: ``true`` if a commitment query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_commitment_query(self):
"""Gets the query for a commitment.
Multiple retrievals produce a nested ``OR`` term.
:return: the commitment query
:rtype: ``osid.calendaring.CommitmentQuery``
:raise: ``Unimplemented`` -- ``supports_commitment_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_commitment_query()`` is ``true``.*
"""
return # osid.calendaring.CommitmentQuery
commitment_query = property(fget=get_commitment_query)
@abc.abstractmethod
def match_any_commitment(self, match):
"""Matches an event that has any commitment.
:param match: ``true`` to match events with any commitment, ``false`` to match events with no commitments
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_commitment_terms(self):
"""Clears the commitment terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
commitment_terms = property(fdel=clear_commitment_terms)
@abc.abstractmethod
def match_containing_event_id(self, event_id, match):
"""Sets the event ``Id`` for this query to match events that have the specified event as an ancestor.
:param event_id: an event ``Id``
:type event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``event_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_containing_event_id_terms(self):
"""Clears the containing event ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
containing_event_id_terms = property(fdel=clear_containing_event_id_terms)
@abc.abstractmethod
def supports_containing_event_query(self):
"""Tests if a containing event query is available.
:return: ``true`` if a containing event query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_containing_event_query(self):
"""Gets the query for a containing event.
:return: the containing event query
:rtype: ``osid.calendaring.EventQuery``
:raise: ``Unimplemented`` -- ``supports_containing_event_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_containing_event_query()`` is ``true``.*
"""
return # osid.calendaring.EventQuery
containing_event_query = property(fget=get_containing_event_query)
@abc.abstractmethod
def match_any_containing_event(self, match):
"""Matches events with any ancestor event.
:param match: ``true`` to match events with any ancestor event, ``false`` to match events with no ancestor
events
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_containing_event_terms(self):
"""Clears the containing event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
containing_event_terms = property(fdel=clear_containing_event_terms)
@abc.abstractmethod
def match_calendar_id(self, calendar_id, match):
"""Sets the calendar ``Id`` for this query.
:param calendar_id: a calendar ``Id``
:type calendar_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``calendar_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_calendar_id_terms(self):
"""Clears the calendar ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_id_terms = property(fdel=clear_calendar_id_terms)
@abc.abstractmethod
def supports_calendar_query(self):
"""Tests if a ``CalendarQuery`` is available for querying calendars.
:return: ``true`` if a calendar query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_calendar_query(self):
"""Gets the query for a calendar.
Multiple retrievals produce a nested ``OR`` term.
:return: the calendar query
:rtype: ``osid.calendaring.CalendarQuery``
:raise: ``Unimplemented`` -- ``supports_calendar_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_calendar_query()`` is ``true``.*
"""
return # osid.calendaring.CalendarQuery
calendar_query = property(fget=get_calendar_query)
@abc.abstractmethod
def clear_calendar_terms(self):
"""Clears the calendar terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_terms = property(fdel=clear_calendar_terms)
@abc.abstractmethod
def get_event_query_record(self, event_record_type):
"""Gets the event query record corresponding to the given ``Event`` record ``Type``.
Multiple retrievals produce a nested ``OR`` term.
:param event_record_type: an event query record type
:type event_record_type: ``osid.type.Type``
:return: the event query record
:rtype: ``osid.calendaring.records.EventQueryRecord``
:raise: ``NullArgument`` -- ``event_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unsupported`` -- ``has_record_type(event_record_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.records.EventQueryRecord
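
# --- Illustrative aside, not part of the OSID specification -----------------------------
# Two conventions used throughout this module, shown with a self-contained toy class
# (``_ToyQueryTerms`` is hypothetical and does not implement the abstract interface above):
#
# 1. Each ``clear_*_terms`` method is also exposed as a delete-only property via
#    ``property(fdel=...)``, so ``del query.implicit_terms`` is equivalent to calling
#    ``query.clear_implicit_terms()``; reading or assigning the attribute raises
#    ``AttributeError`` because no fget/fset is supplied.
# 2. As the ``EventQuery`` docstring states, each distinct ``match_*`` method contributes
#    an ``AND`` term, while repeated calls to the same method nest into an ``OR``.
from collections import defaultdict


class _ToyQueryTerms:
    """Toy term collector mirroring the AND/OR combination rule described above."""

    def __init__(self):
        self._terms = defaultdict(list)  # field name -> values OR'd together

    def match(self, field, value):
        self._terms[field].append(value)

    def clear_terms(self):
        self._terms.clear()

    terms = property(fdel=clear_terms)  # same delete-only property idiom as this module

    def as_expression(self):
        grouped = [' OR '.join('{0}={1}'.format(field, value) for value in values)
                   for field, values in self._terms.items()]
        return ' AND '.join('({0})'.format(group) for group in grouped)


if __name__ == '__main__':
    query = _ToyQueryTerms()
    query.match('recurring_event_id', 'event-1')
    query.match('recurring_event_id', 'event-2')  # same matcher again -> nested OR
    query.match('location_id', 'room-101')        # different matcher -> AND
    print(query.as_expression())
    # (recurring_event_id=event-1 OR recurring_event_id=event-2) AND (location_id=room-101)
    del query.terms                               # delete-only property clears every term
# -----------------------------------------------------------------------------------------
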
class RecurringEventQuery:
"""This is the query for searching recurring events.
Each method match request produces an ``AND`` term while multiple
    invocations of a method produce a nested ``OR``.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def match_schedule_id(self, schedule_id, match):
"""Sets the schedule ``Id`` for this query for matching schedules.
:param schedule_id: a schedule ``Id``
:type schedule_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``schedule_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_schedule_id_terms(self):
"""Clears the schedule ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
schedule_id_terms = property(fdel=clear_schedule_id_terms)
@abc.abstractmethod
def supports_schedule_query(self):
"""Tests if a ``ScheduleQuery`` is available for querying schedules.
:return: ``true`` if a schedule query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_schedule_query(self):
"""Gets the query for a schedule.
Multiple retrievals produce a nested ``OR`` term.
:return: the schedule query
:rtype: ``osid.calendaring.ScheduleQuery``
:raise: ``Unimplemented`` -- ``supports_schedule_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_schedule_query()`` is ``true``.*
"""
return # osid.calendaring.ScheduleQuery
schedule_query = property(fget=get_schedule_query)
@abc.abstractmethod
def match_any_schedule(self, match):
"""Matches a recurring event that has any schedule assigned.
:param match: ``true`` to match recurring events with any schedules, ``false`` to match recurring events with no
schedules
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_schedule_terms(self):
"""Clears the schedule terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
schedule_terms = property(fdel=clear_schedule_terms)
@abc.abstractmethod
def match_superseding_event_id(self, superseding_event_id, match):
"""Sets the superseding event ``Id`` for this query.
:param superseding_event_id: a superseding event ``Id``
:type superseding_event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``superseding_event_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_superseding_event_id_terms(self):
"""Clears the superseding event ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
superseding_event_id_terms = property(fdel=clear_superseding_event_id_terms)
@abc.abstractmethod
def supports_superseding_event_query(self):
"""Tests if a ``SupersedingEventQuery`` is available for querying superseding events.
:return: ``true`` if a superseding event query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_superseding_event_query(self):
"""Gets the query for a superseding event.
Multiple retrievals produce a nested ``OR`` term.
:return: the superseding event query
:rtype: ``osid.calendaring.SupersedingEventQuery``
:raise: ``Unimplemented`` -- ``supports_superseding_event_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_superseding_event_query()`` is ``true``.*
"""
return # osid.calendaring.SupersedingEventQuery
superseding_event_query = property(fget=get_superseding_event_query)
@abc.abstractmethod
def match_any_superseding_event(self, match):
"""Matches a recurring event that has any superseding event assigned.
:param match: ``true`` to match recurring events with any superseding events, ``false`` to match events with no
superseding events
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_superseding_event_terms(self):
"""Clears the superseding event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
superseding_event_terms = property(fdel=clear_superseding_event_terms)
@abc.abstractmethod
def match_specific_meeting_time(self, start, end, match):
"""Matches recurring events with specific dates between the given range inclusive.
:param start: start date
:type start: ``osid.calendaring.DateTime``
:param end: end date
:type end: ``osid.calendaring.DateTime``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``end`` is less than ``start``
:raise: ``NullArgument`` -- ``start`` or ``end`` is ``zero``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_specific_meeting_time(self, match):
"""Matches a recurring event that has any specific date assigned.
:param match: ``true`` to match recurring events with any specific date, ``false`` to match recurring events
with no specific date
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_specific_meeting_time_terms(self):
"""Clears the blackout terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
specific_meeting_time_terms = property(fdel=clear_specific_meeting_time_terms)
@abc.abstractmethod
def match_event_id(self, event_id, match):
"""Sets the composed event ``Id`` for this query.
:param event_id: an event ``Id``
:type event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``event_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_event_id_terms(self):
"""Clears the event ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
event_id_terms = property(fdel=clear_event_id_terms)
@abc.abstractmethod
def supports_event_query(self):
"""Tests if an ``EventQuery`` is available for querying composed events.
:return: ``true`` if an event query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_event_query(self):
"""Gets the query for an event.
Multiple retrievals produce a nested ``OR`` term.
:return: the event query
:rtype: ``osid.calendaring.EventQuery``
:raise: ``Unimplemented`` -- ``supports_event_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_event_query()`` is ``true``.*
"""
return # osid.calendaring.EventQuery
event_query = property(fget=get_event_query)
@abc.abstractmethod
def match_any_event(self, match):
"""Matches a recurring event that has any composed event assigned.
:param match: ``true`` to match recurring events with any composed events, ``false`` to match events with no
composed events
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_event_terms(self):
"""Clears the event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
event_terms = property(fdel=clear_event_terms)
@abc.abstractmethod
def match_blackout(self, datetime, match):
"""Matches a blackout that contains the given date time.
:param datetime: a datetime
:type datetime: ``osid.calendaring.DateTime``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``datetime`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_blackout(self, match):
"""Matches a recurring event that has any blackout assigned.
:param match: ``true`` to match recurring events with any blackout, ``false`` to match recurring events with no
blackout
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_blackout_terms(self):
"""Clears the blackout terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
blackout_terms = property(fdel=clear_blackout_terms)
@abc.abstractmethod
def match_blackout_inclusive(self, start, end, match):
"""Matches recurring events with blackouts between the given range inclusive.
:param start: start date
:type start: ``osid.calendaring.DateTime``
:param end: end date
:type end: ``osid.calendaring.DateTime``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``end`` is less than ``start``
:raise: ``NullArgument`` -- ``start`` or ``end`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_blackout_inclusive_terms(self):
"""Clears the blackout terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
blackout_inclusive_terms = property(fdel=clear_blackout_inclusive_terms)
@abc.abstractmethod
def match_sponsor_id(self, sponsor_id, match):
"""Sets the sponsor ``Id`` for this query.
:param sponsor_id: a sponsor ``Id``
:type sponsor_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``sponsor_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_sponsor_id_terms(self):
"""Clears the sponsor ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
sponsor_id_terms = property(fdel=clear_sponsor_id_terms)
@abc.abstractmethod
def supports_sponsor_query(self):
"""Tests if a ``LocationQuery`` is available for querying sponsors.
:return: ``true`` if a sponsor query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_sponsor_query(self):
"""Gets the query for a sponsor.
Multiple retrievals produce a nested ``OR`` term.
:return: the sponsor query
:rtype: ``osid.resource.ResourceQuery``
:raise: ``Unimplemented`` -- ``supports_sponsor_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_sponsor_query()`` is ``true``.*
"""
return # osid.resource.ResourceQuery
sponsor_query = property(fget=get_sponsor_query)
@abc.abstractmethod
def clear_sponsor_terms(self):
"""Clears the sponsor terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
sponsor_terms = property(fdel=clear_sponsor_terms)
@abc.abstractmethod
def match_calendar_id(self, calendar_id, match):
"""Sets the calendar ``Id`` for this query.
:param calendar_id: a calendar ``Id``
:type calendar_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``calendar_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_calendar_id_terms(self):
"""Clears the calendar ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_id_terms = property(fdel=clear_calendar_id_terms)
@abc.abstractmethod
def supports_calendar_query(self):
"""Tests if a ``CalendarQuery`` is available for querying calendars.
:return: ``true`` if a calendar query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_calendar_query(self):
"""Gets the query for a calendar.
Multiple retrievals produce a nested ``OR`` term.
:return: the calendar query
:rtype: ``osid.calendaring.CalendarQuery``
:raise: ``Unimplemented`` -- ``supports_calendar_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_calendar_query()`` is ``true``.*
"""
return # osid.calendaring.CalendarQuery
calendar_query = property(fget=get_calendar_query)
@abc.abstractmethod
def clear_calendar_terms(self):
"""Clears the calendar terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_terms = property(fdel=clear_calendar_terms)
@abc.abstractmethod
def get_recurring_event_query_record(self, recurring_event_record_type):
"""Gets the recurring event query recod corresponding to the given ``RecurringEvent`` record ``Type``.
Multiple retrievals produce a nested ``OR`` term.
:param recurring_event_record_type: a recurring event query record type
:type recurring_event_record_type: ``osid.type.Type``
:return: the recurring event query record
:rtype: ``osid.calendaring.records.RecurringEventQueryRecord``
:raise: ``NullArgument`` -- ``recurring_event_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unsupported`` -- ``has_record_type(recurring_event_record_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.records.RecurringEventQueryRecord
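# Hypothetical usage sketch for the recurring event query methods above,
# illustrating the AND/OR semantics from the class docstring: different methods
# AND together, while repeated calls to the same method OR together. ``query``
# and the date/Id variables are placeholders, not names defined in this module.
#
#     query.match_blackout_inclusive(spring_start, spring_end, True)
#     query.match_blackout_inclusive(fall_start, fall_end, True)   # ORed with the first blackout term
#     query.match_sponsor_id(sponsor_id, True)                     # ANDed with the blackout terms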
class SupersedingEventQuery:
"""This is the query for searching superseding events.
Each method match request produces an ``AND`` term while multiple
invocations of a method produces a nested ``OR``.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def match_superseded_event_id(self, event_id, match):
"""Sets the event ``Id`` for this query for matching attached events.
:param event_id: an event ``Id``
:type event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``event_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_superseded_event_id_terms(self):
"""Clears the event ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
superseded_event_id_terms = property(fdel=clear_superseded_event_id_terms)
@abc.abstractmethod
def supports_superseded_event_query(self):
"""Tests if an ``EventQuery`` is available for querying attached events.
:return: ``true`` if an event query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_superseded_event_query(self):
"""Gets the query for an attached event.
Multiple retrievals produce a nested ``OR`` term.
:return: the event query
:rtype: ``osid.calendaring.EventQuery``
:raise: ``Unimplemented`` -- ``supports_superseded_event_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_superseded_event_query()`` is ``true``.*
"""
return # osid.calendaring.EventQuery
superseded_event_query = property(fget=get_superseded_event_query)
@abc.abstractmethod
def clear_superseded_event_terms(self):
"""Clears the event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
superseded_event_terms = property(fdel=clear_superseded_event_terms)
@abc.abstractmethod
def match_superseding_event_id(self, superseding_event_id, match):
"""Sets the superseding event ``Id`` for this query.
:param superseding_event_id: a superseding event ``Id``
:type superseding_event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``superseding_event_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_superseding_event_id_terms(self):
"""Clears the superseding event ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
superseding_event_id_terms = property(fdel=clear_superseding_event_id_terms)
@abc.abstractmethod
def supports_superseding_event_query(self):
"""Tests if a ``SupersedingEventQuery`` is available.
:return: ``true`` if a superseding event query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_superseding_event_query(self):
"""Gets the query for a superseding event.
Multiple retrievals produce a nested ``OR`` term.
:return: the superseding event query
:rtype: ``osid.calendaring.EventQuery``
:raise: ``Unimplemented`` -- ``supports_superseding_event_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_superseding_event_query()`` is ``true``.*
"""
return # osid.calendaring.EventQuery
superseding_event_query = property(fget=get_superseding_event_query)
@abc.abstractmethod
def clear_superseding_event_terms(self):
"""Clears the superseding event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
superseding_event_terms = property(fdel=clear_superseding_event_terms)
@abc.abstractmethod
def match_superseded_date(self, from_, to, match):
"""Matches superseding events that supersede within the given dates inclusive.
:param from: start date
:type from: ``osid.calendaring.DateTime``
:param to: end date
:type to: ``osid.calendaring.DateTime``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``from`` is greater than ``to``
:raise: ``NullArgument`` -- ``from`` or ``to`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_superseded_date(self, match):
"""Matches a superseding event that has any superseded date.
:param match: ``true`` to match superseding events with any superseded date, ``false`` to match superseding events
with no superseded date
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_superseded_date_terms(self):
"""Clears the superseded date terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
superseded_date_terms = property(fdel=clear_superseded_date_terms)
@abc.abstractmethod
def match_superseded_event_position(self, from_, to, match):
"""Matches superseding events that supersede within the denormalized event positions inclusive.
:param from: start position
:type from: ``integer``
:param to: end position
:type to: ``integer``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- the absolute value of ``from`` is greater than ``to``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_superseded_event_position(self, match):
"""Matches a superseding event that has any superseded position.
:param match: ``true`` to match superseding events with any superseded event position, ``false`` to match
superseding events with no superseded event position
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_superseded_event_position_terms(self):
"""Clears the superseded position terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
superseded_event_position_terms = property(fdel=clear_superseded_event_position_terms)
@abc.abstractmethod
def match_calendar_id(self, calendar_id, match):
"""Sets the calendar ``Id`` for this query.
:param calendar_id: a calendar ``Id``
:type calendar_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``calendar_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_calendar_id_terms(self):
"""Clears the calendar ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_id_terms = property(fdel=clear_calendar_id_terms)
@abc.abstractmethod
def supports_calendar_query(self):
"""Tests if a ``CalendarQuery`` is available for querying calendars.
:return: ``true`` if a calendar query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_calendar_query(self):
"""Gets the query for a calendar.
Multiple retrievals produce a nested ``OR`` term.
:return: the calendar query
:rtype: ``osid.calendaring.CalendarQuery``
:raise: ``Unimplemented`` -- ``supports_calendar_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_calendar_query()`` is ``true``.*
"""
return # osid.calendaring.CalendarQuery
calendar_query = property(fget=get_calendar_query)
@abc.abstractmethod
def clear_calendar_terms(self):
"""Clears the calendar terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_terms = property(fdel=clear_calendar_terms)
@abc.abstractmethod
def get_superseding_event_query_record(self, superseding_event_record_type):
"""Gets the superseding event query record corresponding to the given ``SupersedingEvent`` record ``Type``.
Multiple retrievals produce a nested ``OR`` term.
:param superseding_event_record_type: a superseding event query record type
:type superseding_event_record_type: ``osid.type.Type``
:return: the superseding event query record
:rtype: ``osid.calendaring.records.SupersedingEventQueryRecord``
:raise: ``NullArgument`` -- ``superseding_event_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unsupported`` -- ``has_record_type(superseding_event_record_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.records.SupersedingEventQueryRecord
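# Hypothetical sketch of a superseding event query combining the terms above;
# all names other than the query methods are placeholders:
#
#     query.match_superseded_event_id(lecture_series_id, True)        # the event being superseded
#     query.match_superseded_date(holiday_start, holiday_end, True)   # ANDed: supersessions within the holiday
#     query.match_superseded_event_position(1, 1, True)               # ANDed: supersede the first occurrence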
class OffsetEventQuery:
"""This is the query for searching events.
Each method match request produces an ``AND`` term while multiple
invocations of a method produces a nested ``OR``.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def match_fixed_start_time(self, from_, to, match):
"""Matches a fixed start time between the given range inclusive.
:param from: the start of the range
:type from: ``osid.calendaring.DateTime``
:param to: the end of the range
:type to: ``osid.calendaring.DateTime``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``to`` is less than ``from``
:raise: ``NullArgument`` -- ``from`` or ``to`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_fixed_start_time(self, match):
"""Matches events with fixed start times.
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_fixed_start_time_terms(self):
"""Clears the fixed start time terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
fixed_start_time_terms = property(fdel=clear_fixed_start_time_terms)
@abc.abstractmethod
def match_start_reference_event_id(self, event_id, match):
"""Sets the start reference event ``Id`` for this query.
:param event_id: an event ``Id``
:type event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``event_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_start_reference_event_id_terms(self):
"""Clears the start reference event ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
start_reference_event_id_terms = property(fdel=clear_start_reference_event_id_terms)
@abc.abstractmethod
def supports_start_reference_event_query(self):
"""Tests if an ``EventQuery`` is available for querying start reference event terms.
:return: ``true`` if an event query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_start_reference_event_query(self):
"""Gets the query for the start reference event.
Multiple retrievals produce a nested ``OR`` term.
:return: the event query
:rtype: ``osid.calendaring.EventQuery``
:raise: ``Unimplemented`` -- ``supports_start_reference_event_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_start_reference_event_query()`` is ``true``.*
"""
return # osid.calendaring.EventQuery
start_reference_event_query = property(fget=get_start_reference_event_query)
@abc.abstractmethod
def match_any_start_reference_event(self, match):
"""Matches offset events with any starting reference event.
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_start_reference_event_terms(self):
"""Clears the start reference event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
start_reference_event_terms = property(fdel=clear_start_reference_event_terms)
@abc.abstractmethod
def match_fixed_start_offset(self, from_, to, match):
"""Matches a fixed offset amount between the given range inclusive.
:param from: the start of the range
:type from: ``osid.calendaring.Duration``
:param to: the end of the range
:type to: ``osid.calendaring.Duration``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``to`` is less than ``from``
:raise: ``NullArgument`` -- ``from`` or ``to`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_fixed_start_offset(self, match):
"""Matches fixed offset events.
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_fixed_start_offset_terms(self):
"""Clears the fixed offset terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
fixed_start_offset_terms = property(fdel=clear_fixed_start_offset_terms)
@abc.abstractmethod
def match_relative_weekday_start_offset(self, low, high, match):
"""Matches a relative weekday offset amount between the given range inclusive.
:param low: the start of the range
:type low: ``integer``
:param high: the end of the range
:type high: ``integer``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_relative_weekday_start_offset_terms(self):
"""Clears the relative weekday offset terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
relative_weekday_start_offset_terms = property(fdel=clear_relative_weekday_start_offset_terms)
@abc.abstractmethod
def match_relative_start_weekday(self, weekday, match):
"""Matches a relative weekday.
:param weekday: the weekday
:type weekday: ``cardinal``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_relative_start_weekday(self, match):
"""Matches relative weekday offset events.
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_relative_start_weekday_terms(self):
"""Clears the relative weekday terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
relative_start_weekday_terms = property(fdel=clear_relative_start_weekday_terms)
@abc.abstractmethod
def match_fixed_duration(self, low, high, match):
"""Matches a fixed duration between the given range inclusive.
:param low: the start of the range
:type low: ``osid.calendaring.Duration``
:param high: the end of the range
:type high: ``osid.calendaring.Duration``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_fixed_duration_terms(self):
"""Clears the fixed duration offset terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
fixed_duration_terms = property(fdel=clear_fixed_duration_terms)
@abc.abstractmethod
def match_end_reference_event_id(self, event_id, match):
"""Sets the end reference event ``Id`` for this query.
:param event_id: an event ``Id``
:type event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``event_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_end_reference_event_id_terms(self):
"""Clears the end reference event ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
end_reference_event_id_terms = property(fdel=clear_end_reference_event_id_terms)
@abc.abstractmethod
def supports_end_reference_event_query(self):
"""Tests if an ``EventQuery`` is available for querying end reference event terms.
:return: ``true`` if an event query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_end_reference_event_query(self):
"""Gets the query for the end reference event.
Multiple retrievals produce a nested ``OR`` term.
:return: the event query
:rtype: ``osid.calendaring.EventQuery``
:raise: ``Unimplemented`` -- ``supports_end_reference_event_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_end_reference_event_query()`` is ``true``.*
"""
return # osid.calendaring.EventQuery
end_reference_event_query = property(fget=get_end_reference_event_query)
@abc.abstractmethod
def match_any_end_reference_event(self, match):
"""Matches any end reference event events.
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_end_reference_event_terms(self):
"""Clears the end reference event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
end_reference_event_terms = property(fdel=clear_end_reference_event_terms)
@abc.abstractmethod
def match_fixed_end_offset(self, from_, to, match):
"""Matches a fixed offset amount between the given range inclusive.
:param from: the start of the range
:type from: ``osid.calendaring.Duration``
:param to: the end of the range
:type to: ``osid.calendaring.Duration``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``to`` is less than ``from``
:raise: ``NullArgument`` -- ``from`` or ``to`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_fixed_end_offset(self, match):
"""Matches fixed offset events.
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_fixed_end_offset_terms(self):
"""Clears the fixed offset terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
fixed_end_offset_terms = property(fdel=clear_fixed_end_offset_terms)
@abc.abstractmethod
def match_relative_weekday_end_offset(self, low, high, match):
"""Matches a relative weekday offset amount between the given range inclusive.
:param low: the start of the range
:type low: ``integer``
:param high: the end of the range
:type high: ``integer``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_relative_weekday_end_offset_terms(self):
"""Clears the relative weekday offset terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
relative_weekday_end_offset_terms = property(fdel=clear_relative_weekday_end_offset_terms)
@abc.abstractmethod
def match_relative_end_weekday(self, weekday, match):
"""Matches a relative weekday.
:param weekday: the weekday
:type weekday: ``cardinal``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_relative_end_weekday(self, match):
"""Matches relative weekday offset events.
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_relative_end_weekday_terms(self):
"""Clears the relative weekday terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
relative_end_weekday_terms = property(fdel=clear_relative_end_weekday_terms)
@abc.abstractmethod
def match_location_description(self, location, string_match_type, match):
"""Matches the location description string.
:param location: location string
:type location: ``string``
:param string_match_type: string match type
:type string_match_type: ``osid.type.Type``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``location`` is not of ``string_match_type``
:raise: ``NullArgument`` -- ``location`` or ``string_match_type`` is ``null``
:raise: ``Unsupported`` -- ``supports_string_match_type(string_match_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_location_description(self, match):
"""Matches an event that has any location description assigned.
:param match: ``true`` to match events with any location description, ``false`` to match events with no location
description
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_location_description_terms(self):
"""Clears the location description terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
location_description_terms = property(fdel=clear_location_description_terms)
@abc.abstractmethod
def match_location_id(self, location_id, match):
"""Sets the location ``Id`` for this query.
:param location_id: a location ``Id``
:type location_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``location_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_location_id_terms(self):
"""Clears the location ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
location_id_terms = property(fdel=clear_location_id_terms)
@abc.abstractmethod
def supports_location_query(self):
"""Tests if a ``LocationQuery`` is available for querying locations.
:return: ``true`` if a location query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_location_query(self):
"""Gets the query for a location.
Multiple retrievals produce a nested ``OR`` term.
:return: the location query
:rtype: ``osid.mapping.LocationQuery``
:raise: ``Unimplemented`` -- ``supports_location_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_location_query()`` is ``true``.*
"""
return # osid.mapping.LocationQuery
location_query = property(fget=get_location_query)
@abc.abstractmethod
def match_any_location(self, match):
"""Matches an event that has any location assigned.
:param match: ``true`` to match events with any location, ``false`` to match events with no location
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_location_terms(self):
"""Clears the location terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
location_terms = property(fdel=clear_location_terms)
@abc.abstractmethod
def match_sponsor_id(self, sponsor_id, match):
"""Sets the sponsor ``Id`` for this query.
:param sponsor_id: a sponsor ``Id``
:type sponsor_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``sponsor_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_sponsor_id_terms(self):
"""Clears the sponsor ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
sponsor_id_terms = property(fdel=clear_sponsor_id_terms)
@abc.abstractmethod
def supports_sponsor_query(self):
"""Tests if a ``LocationQuery`` is available for querying sponsors.
:return: ``true`` if a sponsor query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_sponsor_query(self):
"""Gets the query for a sponsor.
Multiple retrievals produce a nested ``OR`` term.
:return: the sponsor query
:rtype: ``osid.resource.ResourceQuery``
:raise: ``Unimplemented`` -- ``supports_sponsor_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_sponsor_query()`` is ``true``.*
"""
return # osid.resource.ResourceQuery
sponsor_query = property(fget=get_sponsor_query)
@abc.abstractmethod
def clear_sponsor_terms(self):
"""Clears the sponsor terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
sponsor_terms = property(fdel=clear_sponsor_terms)
@abc.abstractmethod
def match_calendar_id(self, calendar_id, match):
"""Sets the calendar ``Id`` for this query.
:param calendar_id: a calendar ``Id``
:type calendar_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``calendar_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_calendar_id_terms(self):
"""Clears the calendar ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_id_terms = property(fdel=clear_calendar_id_terms)
@abc.abstractmethod
def supports_calendar_query(self):
"""Tests if a ``CalendarQuery`` is available for querying calendars.
:return: ``true`` if a calendar query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_calendar_query(self):
"""Gets the query for a calendar.
Multiple retrievals produce a nested ``OR`` term.
:return: the calendar query
:rtype: ``osid.calendaring.CalendarQuery``
:raise: ``Unimplemented`` -- ``supports_calendar_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_calendar_query()`` is ``true``.*
"""
return # osid.calendaring.CalendarQuery
calendar_query = property(fget=get_calendar_query)
@abc.abstractmethod
def clear_calendar_terms(self):
"""Clears the calendar terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_terms = property(fdel=clear_calendar_terms)
@abc.abstractmethod
def get_offset_event_query_record(self, offset_event_record_type):
"""Gets the offset event query record corresponding to the given ``OffsetEvent`` record ``Type``.
Multiple retrievals produce a nested ``OR`` term.
:param offset_event_record_type: an offset event query record type
:type offset_event_record_type: ``osid.type.Type``
:return: the offset event query record
:rtype: ``osid.calendaring.records.OffsetEventQueryRecord``
:raise: ``NullArgument`` -- ``offset_event_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unsupported`` -- ``has_record_type(offset_event_record_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.records.OffsetEventQueryRecord
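# Hypothetical sketch of an offset event query: offset events are anchored to
# reference events, so a typical query mixes reference-event terms with offset
# and duration ranges. The Id and Duration variables are placeholders.
#
#     query.match_start_reference_event_id(opening_event_id, True)
#     query.match_fixed_start_offset(two_hours, three_hours, True)   # starts 2 to 3 hours after the reference
#     query.match_fixed_duration(one_hour, two_hours, True)          # ANDed: offset events lasting 1 to 2 hours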
class ScheduleQuery:
"""This is the query for searching schedules.
Each method match request produces an ``AND`` term while multiple
invocations of a method produces a nested ``OR``.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def match_schedule_slot_id(self, schedule_slot_id, match):
"""Sets the schedule ``Id`` for this query for matching nested schedule slots.
:param schedule_slot_id: a schedule slot ``Id``
:type schedule_slot_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``schedule_slot_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_schedule_slot_id_terms(self):
"""Clears the schedule slot ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
schedule_slot_id_terms = property(fdel=clear_schedule_slot_id_terms)
@abc.abstractmethod
def supports_schedule_slot_query(self):
"""Tests if a ``ScheduleSlotQuery`` is available for querying sechedule slots.
:return: ``true`` if a schedule slot query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_schedule_slot_query(self):
"""Gets the query for a schedul slot.
Multiple retrievals produce a nested ``OR`` term.
:return: the schedule slot query
:rtype: ``osid.calendaring.ScheduleSlotQuery``
:raise: ``Unimplemented`` -- ``supports_schedule_slot_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_schedule_slot_query()`` is ``true``.*
"""
return # osid.calendaring.ScheduleSlotQuery
schedule_slot_query = property(fget=get_schedule_slot_query)
@abc.abstractmethod
def match_any_schedule_slot(self, match):
"""Matches a schedule that has any schedule slot assigned.
:param match: ``true`` to match schedules with any schedule slots, ``false`` to match schedules with no schedule
slots
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_schedule_slot_terms(self):
"""Clears the schedule slot terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
schedule_slot_terms = property(fdel=clear_schedule_slot_terms)
@abc.abstractmethod
def match_time_period_id(self, time_period_id, match):
"""Sets the time period ``Id`` for this query.
:param time_period_id: a time period ``Id``
:type time_period_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``time_period_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_time_period_id_terms(self):
"""Clears the time period ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
time_period_id_terms = property(fdel=clear_time_period_id_terms)
@abc.abstractmethod
def supports_time_period_query(self):
"""Tests if a ``TimePeriodQuery`` is available.
:return: ``true`` if a time period query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_time_period_query(self):
"""Gets the query for a time period.
Multiple retrievals produce a nested ``OR`` term.
:return: the time period query
:rtype: ``osid.calendaring.TimePeriodQuery``
:raise: ``Unimplemented`` -- ``supports_time_period_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_time_period_query()`` is ``true``.*
"""
return # osid.calendaring.TimePeriodQuery
time_period_query = property(fget=get_time_period_query)
@abc.abstractmethod
def match_any_time_period(self, match):
"""Matches a schedule that has any time period assigned.
:param match: ``true`` to match schedules with any time periods, ``false`` to match schedules with no time
periods
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_time_period_terms(self):
"""Clears the time period terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
time_period_terms = property(fdel=clear_time_period_terms)
@abc.abstractmethod
def match_schedule_start(self, low, high, match):
"""Matches the schedule start time between the given range inclusive.
:param low: low time range
:type low: ``osid.calendaring.DateTime``
:param high: high time range
:type high: ``osid.calendaring.DateTime``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``high`` is less than ``low``
:raise: ``NullArgument`` -- ``high`` or ``low`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_schedule_start(self, match):
"""Matches a schedule that has any start time assigned.
:param match: ``true`` to match schedules with any start time, ``false`` to match schedules with no start time
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_schedule_start_terms(self):
"""Clears the schedule start terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
schedule_start_terms = property(fdel=clear_schedule_start_terms)
@abc.abstractmethod
def match_schedule_end(self, low, high, match):
"""Matches the schedule end time between the given range inclusive.
:param low: low time range
:type low: ``osid.calendaring.DateTime``
:param high: high time range
:type high: ``osid.calendaring.DateTime``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``high`` is less than ``low``
:raise: ``NullArgument`` -- ``high`` or ``low`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_schedule_end(self, match):
"""Matches a schedule that has any end time assigned.
:param match: ``true`` to match schedules with any end time, ``false`` to match schedules with no end time
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_schedule_end_terms(self):
"""Clears the schedule end terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
schedule_end_terms = property(fdel=clear_schedule_end_terms)
@abc.abstractmethod
def match_schedule_time(self, date, match):
"""Matches schedules with start and end times between the given range inclusive.
:param date: a date
:type date: ``osid.calendaring.DateTime``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``date`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_schedule_time(self, match):
"""Matches schedules that has any time assigned.
:param match: ``true`` to match schedules with any time, ``false`` to match schedules with no time
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_schedule_time_terms(self):
"""Clears the schedule time terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
schedule_time_terms = property(fdel=clear_schedule_time_terms)
@abc.abstractmethod
def match_schedule_time_inclusive(self, start, end, match):
"""Matches schedules with start and end times between the given range inclusive.
:param start: start date
:type start: ``osid.calendaring.DateTime``
:param end: end date
:type end: ``osid.calendaring.DateTime``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``end`` is less than ``start``
:raise: ``NullArgument`` -- ``end`` or ``start`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_schedule_time_inclusive_terms(self):
"""Clears the schedule time inclusive terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
schedule_time_inclusive_terms = property(fdel=clear_schedule_time_inclusive_terms)
@abc.abstractmethod
def match_limit(self, from_, to, match):
"""Matches schedules that have the given limit in the given range inclusive.
:param from: start range
:type from: ``integer``
:param to: end range
:type to: ``integer``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``to`` is less than ``from``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_limit(self, match):
"""Matches schedules with any occurrence limit.
:param match: ``true`` to match schedules with any limit, ``false`` to match schedules with no limit
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_limit_terms(self):
"""Clears the limit terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
limit_terms = property(fdel=clear_limit_terms)
@abc.abstractmethod
def match_location_description(self, location, string_match_type, match):
"""Matches the location description string.
:param location: location string
:type location: ``string``
:param string_match_type: string match type
:type string_match_type: ``osid.type.Type``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``location`` is not of ``string_match_type``
:raise: ``NullArgument`` -- ``location`` or ``string_match_type`` is ``null``
:raise: ``Unsupported`` -- ``supports_string_match_type(string_match_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_location_description(self, match):
"""Matches a schedule that has any location description assigned.
:param match: ``true`` to match schedules with any location description, ``false`` to match schedules with no
location description
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_location_description_terms(self):
"""Clears the location description terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
location_description_terms = property(fdel=clear_location_description_terms)
@abc.abstractmethod
def match_location_id(self, location_id, match):
"""Sets the location ``Id`` for this query.
:param location_id: a location ``Id``
:type location_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``location_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_location_id_terms(self):
"""Clears the location ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
location_id_terms = property(fdel=clear_location_id_terms)
@abc.abstractmethod
def supports_location_query(self):
"""Tests if a ``LocationQuery`` is available for querying locations.
:return: ``true`` if a location query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_location_query(self):
"""Gets the query for a location.
Multiple retrievals produce a nested ``OR`` term.
:return: the location query
:rtype: ``osid.mapping.LocationQuery``
:raise: ``Unimplemented`` -- ``supports_location_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_location_query()`` is ``true``.*
"""
return # osid.mapping.LocationQuery
location_query = property(fget=get_location_query)
@abc.abstractmethod
def match_any_location(self, match):
"""Matches a schedule that has any location assigned.
:param match: ``true`` to match schedules with any location, ``false`` to match schedules with no location
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_location_terms(self):
"""Clears the location terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
location_terms = property(fdel=clear_location_terms)
@abc.abstractmethod
def match_total_duration(self, low, high, match):
"""Matches the total duration between the given range inclusive.
:param low: low duration range
:type low: ``osid.calendaring.Duration``
:param high: high duration range
:type high: ``osid.calendaring.Duration``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``high`` is less than ``low``
:raise: ``NullArgument`` -- ``high`` or ``low`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_total_duration_terms(self):
"""Clears the total duration terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
total_duration_terms = property(fdel=clear_total_duration_terms)
@abc.abstractmethod
def match_calendar_id(self, calendar_id, match):
"""Sets the calendar ``Id`` for this query.
:param calendar_id: a calendar ``Id``
:type calendar_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``calendar_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_calendar_id_terms(self):
"""Clears the calendar ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_id_terms = property(fdel=clear_calendar_id_terms)
@abc.abstractmethod
def supports_calendar_query(self):
"""Tests if a ``CalendarQuery`` is available for querying calendars.
:return: ``true`` if a calendar query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_calendar_query(self):
"""Gets the query for a calendar.
Multiple retrievals produce a nested ``OR`` term.
:return: the calendar query
:rtype: ``osid.calendaring.CalendarQuery``
:raise: ``Unimplemented`` -- ``supports_calendar_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_calendar_query()`` is ``true``.*
"""
return # osid.calendaring.CalendarQuery
calendar_query = property(fget=get_calendar_query)
@abc.abstractmethod
def clear_calendar_terms(self):
"""Clears the calendar terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_terms = property(fdel=clear_calendar_terms)
@abc.abstractmethod
def get_schedule_query_record(self, schedule_record_type):
"""Gets the schedule query record corresponding to the given ``Schedule`` record ``Type``.
Multiple retrievals produce a nested ``OR`` term.
:param schedule_record_type: a schedule query record type
:type schedule_record_type: ``osid.type.Type``
:return: the schedule query record
:rtype: ``osid.calendaring.records.ScheduleQueryRecord``
:raise: ``NullArgument`` -- ``schedule_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unsupported`` -- ``has_record_type(schedule_record_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.records.ScheduleQueryRecord
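# Hypothetical sketch of a schedule query scoped to a term and a location; the
# Id and DateTime variables are placeholders:
#
#     query.match_time_period_id(fall_term_id, True)
#     query.match_location_id(lecture_hall_id, True)           # ANDed with the time period term
#     query.match_schedule_start(term_start, term_end, True)   # schedules starting within the term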
class ScheduleSlotQuery:
"""This is the query for searching schedule slots.
Each method match request produces an ``AND`` term while multiple
invocations of a method produces a nested ``OR``.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def match_schedule_slot_id(self, schedule_slot_id, match):
"""Sets the schedule ``Id`` for this query for matching nested schedule slots.
:param schedule_slot_id: a schedule slot ``Id``
:type schedule_slot_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``schedule_slot_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_schedule_slot_id_terms(self):
"""Clears the schedule slot ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
schedule_slot_id_terms = property(fdel=clear_schedule_slot_id_terms)
@abc.abstractmethod
def supports_schedule_slot_query(self):
"""Tests if a ``ScheduleSlotQuery`` is available for querying sechedule slots.
:return: ``true`` if a schedule slot query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_schedule_slot_query(self):
"""Gets the query for a schedul slot.
Multiple retrievals produce a nested ``OR`` term.
:return: the schedule slot query
:rtype: ``osid.calendaring.ScheduleSlotQuery``
:raise: ``Unimplemented`` -- ``supports_schedule_slot_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_schedule_slot_query()`` is ``true``.*
"""
return # osid.calendaring.ScheduleSlotQuery
schedule_slot_query = property(fget=get_schedule_slot_query)
@abc.abstractmethod
def match_any_schedule_slot(self, match):
"""Matches a schedule that has any schedule slot assigned.
:param match: ``true`` to match schedules with any schedule slots, ``false`` to match schedules with no schedule
slots
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_schedule_slot_terms(self):
"""Clears the schedule slot terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
schedule_slot_terms = property(fdel=clear_schedule_slot_terms)
@abc.abstractmethod
def match_weekday(self, weekday, match):
"""Matches schedules that have the given weekday.
:param weekday: a weekday
:type weekday: ``cardinal``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_weekday(self, match):
"""Matches schedules with any weekday set.
:param match: ``true`` to match schedules with any weekday, ``false`` to match schedules with no weekday
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_weekday_terms(self):
"""Clears the weekday terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
weekday_terms = property(fdel=clear_weekday_terms)
@abc.abstractmethod
def match_weekly_interval(self, from_, to, match):
"""Matches schedules that have the given weekly interval in the given range inclusive.
:param from: start range
:type from: ``integer``
:param to: end range
:type to: ``integer``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``to`` is less than ``from``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_weekly_interval(self, match):
"""Matches schedules with any weekly interval set.
:param match: ``true`` to match schedules with any weekly interval, ``false`` to match schedules with no weekly
interval
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_weekly_interval_terms(self):
"""Clears the weekly interval terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
weekly_interval_terms = property(fdel=clear_weekly_interval_terms)
@abc.abstractmethod
def match_week_of_month(self, from_, to, match):
"""Matches schedules that have a week of month in the given range inclusive.
:param from: start range
:type from: ``integer``
:param to: end range
:type to: ``integer``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``to`` is less than ``from``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_week_of_month(self, match):
"""Matches schedules with any month week set.
:param match: ``true`` to match schedules with any week of month, ``false`` to match schedules with no month
week
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_week_of_month_terms(self):
"""Clears the week of month terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
week_of_month_terms = property(fdel=clear_week_of_month_terms)
@abc.abstractmethod
def match_weekday_time(self, from_, to, match):
"""Matches schedules that have a weekday time in the given range inclusive.
:param from: start range
:type from: ``osid.calendaring.Time``
:param to: end range
:type to: ``osid.calendaring.Time``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``to`` is less than ``from``
:raise: ``NullArgument`` -- ``from`` or ``to`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_weekday_time(self, match):
"""Matches schedules with any weekday time.
:param match: ``true`` to match schedules with any weekday time, ``false`` to match schedules with no weekday
time
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_weekday_time_terms(self):
"""Clears the weekday time terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
weekday_time_terms = property(fdel=clear_weekday_time_terms)
@abc.abstractmethod
def match_fixed_interval(self, from_, to, match):
"""Matches schedules that have the given fixed interval in the given range inclusive.
:param from: start range
:type from: ``osid.calendaring.Duration``
:param to: end range
:type to: ``osid.calendaring.Duration``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``to`` is less than ``from``
:raise: ``NullArgument`` -- ``from`` or ``to`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_fixed_interval(self, match):
"""Matches schedules with any fixed interval.
:param match: ``true`` to match schedules with any fixed interval, ``false`` to match schedules with no fixed
interval
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_fixed_interval_terms(self):
"""Clears the fixed interval terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
fixed_interval_terms = property(fdel=clear_fixed_interval_terms)
@abc.abstractmethod
def match_duration(self, low, high, match):
"""Matches the duration between the given range inclusive.
:param low: low duration range
:type low: ``osid.calendaring.Duration``
:param high: high duration range
:type high: ``osid.calendaring.Duration``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``high`` is less than ``low``
:raise: ``NullArgument`` -- ``high`` or ``low`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_duration(self, match):
"""Matches a schedule slot that has any duration.
:param match: ``true`` to match schedules with any duration, ``false`` to match schedules with no duration
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_duration_terms(self):
"""Clears the duration terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
duration_terms = property(fdel=clear_duration_terms)
@abc.abstractmethod
def match_calendar_id(self, calendar_id, match):
"""Sets the calendar ``Id`` for this query.
:param calendar_id: a calendar ``Id``
:type calendar_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``calendar_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_calendar_id_terms(self):
"""Clears the calendar ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_id_terms = property(fdel=clear_calendar_id_terms)
@abc.abstractmethod
def supports_calendar_query(self):
"""Tests if a ``CalendarQuery`` is available for querying calendars.
:return: ``true`` if a calendar query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_calendar_query(self):
"""Gets the query for a calendar.
Multiple retrievals produce a nested ``OR`` term.
:return: the calendar query
:rtype: ``osid.calendaring.CalendarQuery``
:raise: ``Unimplemented`` -- ``supports_calendar_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_calendar_query()`` is ``true``.*
"""
return # osid.calendaring.CalendarQuery
calendar_query = property(fget=get_calendar_query)
@abc.abstractmethod
def clear_calendar_terms(self):
"""Clears the calendar terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_terms = property(fdel=clear_calendar_terms)
@abc.abstractmethod
def get_schedule_slot_query_record(self, schedule_slot_record_type):
"""Gets the schedule slot query record corresponding to the given ``ScheduleSlot`` record ``Type``.
Multiple retrievals produce a nested ``OR`` term.
:param schedule_slot_record_type: a schedule slot query record type
:type schedule_slot_record_type: ``osid.type.Type``
:return: the schedule slot query record
:rtype: ``osid.calendaring.records.ScheduleSlotQueryRecord``
:raise: ``NullArgument`` -- ``schedule_slot_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unsupported`` -- ``has_record_type(schedule_slot_record_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.records.ScheduleSlotQueryRecord
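# --- Illustrative sketch (not part of the OSID spec) -------------------------
# A minimal, hypothetical concrete matcher showing how the match_*/clear_*
# pattern above is typically realized: each match_* call records an AND term,
# repeated calls to the same matcher OR together, and the *_terms property
# deletes all recorded terms.  The class name, the storage format, and the use
# of ValueError in place of the osid InvalidArgument/NullArgument errors are
# assumptions made for illustration only.
class _ExampleDurationTermSketch:
    def __init__(self):
        self._duration_terms = []  # list of (low, high, match) tuples

    def match_duration(self, low, high, match):
        if low is None or high is None:
            raise ValueError('NullArgument: low and high may not be null')
        if high < low:
            raise ValueError('InvalidArgument: high is less than low')
        self._duration_terms.append((low, high, match))  # repeated calls nest as OR

    def clear_duration_terms(self):
        self._duration_terms = []

    duration_terms = property(fdel=clear_duration_terms)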
class TimePeriodQuery:
"""This is the query for searching time periods.
Each method match request produces an ``AND`` term while multiple
invocations of a method produce a nested ``OR``.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def match_start(self, low, high, match):
"""Matches the time period start time between the given range inclusive.
:param low: low time range
:type low: ``osid.calendaring.DateTime``
:param high: high time range
:type high: ``osid.calendaring.DateTime``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``high`` is less than ``low``
:raise: ``NullArgument`` -- ``high`` or ``low`` is ``zero``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_start(self, match):
"""Matches a time period that has any start time assigned.
:param match: ``true`` to match time periods with any start time, ``false`` to match time periods with no start
time
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_start_terms(self):
"""Clears the time period start terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
start_terms = property(fdel=clear_start_terms)
@abc.abstractmethod
def match_end(self, low, high, match):
"""Matches the time period end time between the given range inclusive.
:param low: low time range
:type low: ``osid.calendaring.DateTime``
:param high: high time range
:type high: ``osid.calendaring.DateTime``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``high`` is less than ``low``
:raise: ``NullArgument`` -- ``high`` or ``low`` is ``zero``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_end(self, match):
"""Matches a time period that has any end time assigned.
:param match: ``true`` to match time periods with any end time, ``false`` to match time periods with no end time
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_end_terms(self):
"""Clears the time period end terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
end_terms = property(fdel=clear_end_terms)
@abc.abstractmethod
def match_time(self, time, match):
"""Matches time periods that include the given time.
:param time: date
:type time: ``osid.calendaring.DateTime``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def match_any_time(self, match):
"""Matches a time period that has any time assigned.
:param match: ``true`` to match time periods with any time, ``false`` to match time periods with no time
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_time_terms(self):
"""Clears the time terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
time_terms = property(fdel=clear_time_terms)
@abc.abstractmethod
def match_time_inclusive(self, start, end, match):
"""Matches time periods with start and end times between the given range inclusive.
:param start: start date
:type start: ``osid.calendaring.DateTime``
:param end: end date
:type end: ``osid.calendaring.DateTime``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``end`` is less than ``start``
:raise: ``NullArgument`` -- ``start`` or ``end`` is ``zero``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_time_inclusive_terms(self):
"""Clears the time inclusive terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
time_inclusive_terms = property(fdel=clear_time_inclusive_terms)
@abc.abstractmethod
def match_duration(self, low, high, match):
"""Matches the time period duration between the given range inclusive.
:param low: low duration range
:type low: ``osid.calendaring.Duration``
:param high: high duration range
:type high: ``osid.calendaring.Duration``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``InvalidArgument`` -- ``high`` is less than ``low``
:raise: ``NullArgument`` -- ``high`` or ``low`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_duration_terms(self):
"""Clears the duration terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
duration_terms = property(fdel=clear_duration_terms)
@abc.abstractmethod
def match_exception_id(self, event_id, match):
"""Sets the event ``Id`` for this query to match exceptions.
:param event_id: an exception event ``Id``
:type event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``event_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_exception_id_terms(self):
"""Clears the exception event ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
exception_id_terms = property(fdel=clear_exception_id_terms)
@abc.abstractmethod
def supports_exception_query(self):
"""Tests if an ``EventQuery`` is available for querying exception events.
:return: ``true`` if an exception query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_exception_query(self):
"""Gets the query for an exception event.
Multiple retrievals produce a nested ``OR`` term.
:return: the event query
:rtype: ``osid.calendaring.EventQuery``
:raise: ``Unimplemented`` -- ``supports_exception_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_exception_query()`` is ``true``.*
"""
return # osid.calendaring.EventQuery
exception_query = property(fget=get_exception_query)
@abc.abstractmethod
def match_any_exception(self, match):
"""Matches a time period that has any exception event assigned.
:param match: ``true`` to match time periods with any exception, ``false`` to match time periods with no
exception
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_exception_terms(self):
"""Clears the exception event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
exception_terms = property(fdel=clear_exception_terms)
@abc.abstractmethod
def match_event_id(self, event_id, match):
"""Sets the event ``Id`` for this query.
:param event_id: an event or recurring event ``Id``
:type event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``event_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_event_id_terms(self):
"""Clears the event ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
event_id_terms = property(fdel=clear_event_id_terms)
@abc.abstractmethod
def supports_event_query(self):
"""Tests if an ``EventQuery`` is available for querying events.
:return: ``true`` if an event query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_event_query(self):
"""Gets the query for an event or recurring event.
Multiple retrievals produce a nested ``OR`` term.
:return: the event query
:rtype: ``osid.calendaring.EventQuery``
:raise: ``Unimplemented`` -- ``supports_event_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_event_query()`` is ``true``.*
"""
return # osid.calendaring.EventQuery
event_query = property(fget=get_event_query)
@abc.abstractmethod
def match_any_event(self, match):
"""Matches a time period that has any event assigned.
:param match: ``true`` to match time periods with any event, ``false`` to match time periods with no events
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_event_terms(self):
"""Clears the event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
event_terms = property(fdel=clear_event_terms)
@abc.abstractmethod
def match_calendar_id(self, calendar_id, match):
"""Sets the calendar ``Id`` for this query.
:param calendar_id: a calendar ``Id``
:type calendar_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``calendar_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_calendar_id_terms(self):
"""Clears the calendar ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_id_terms = property(fdel=clear_calendar_id_terms)
@abc.abstractmethod
def supports_calendar_query(self):
"""Tests if a ``CalendarQuery`` is available for querying resources.
:return: ``true`` if a calendar query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_calendar_query(self):
"""Gets the query for a calendar.
Multiple retrievals produce a nested ``OR`` term.
:return: the calendar query
:rtype: ``osid.calendaring.CalendarQuery``
:raise: ``Unimplemented`` -- ``supports_calendar_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_calendar_query()`` is ``true``.*
"""
return # osid.calendaring.CalendarQuery
calendar_query = property(fget=get_calendar_query)
@abc.abstractmethod
def clear_calendar_terms(self):
"""Clears the calendar terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_terms = property(fdel=clear_calendar_terms)
@abc.abstractmethod
def get_time_period_query_record(self, time_period_record_type):
"""Gets the time period query record corresponding to the given ``TimePeriod`` record ``Type``.
Multiple retrievals produce a nested ``OR`` term.
:param time_period_record_type: a time period query record type
:type time_period_record_type: ``osid.type.Type``
:return: the time period query record
:rtype: ``osid.calendaring.records.TimePeriodQueryRecord``
:raise: ``NullArgument`` -- ``time_period_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unsupported`` -- ``has_record_type(time_period_record_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.records.TimePeriodQueryRecord
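# --- Illustrative sketch (assumption, not part of the spec) ------------------
# The supports_*/get_* pairs above follow the usual OSID pattern: the getter
# may only be called when the matching supports_* method returns true,
# otherwise it raises Unimplemented.  A hypothetical caller would therefore
# guard the getter, e.g. for the exception event query:
def _exception_query_if_supported(time_period_query):
    """Return the nested EventQuery when supported, else None (illustration only)."""
    if time_period_query.supports_exception_query():
        return time_period_query.get_exception_query()  # repeat calls produce a nested OR
    return None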
class CommitmentQuery:
"""This is the query for searching commitments.
Each method match request produces an ``AND`` term while multiple
invocations of a method produce a nested ``OR``.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def match_event_id(self, event_id, match):
"""Sets the event ``Id`` for this query.
:param event_id: an event ``Id``
:type event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``event_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_event_id_terms(self):
"""Clears the event ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
event_id_terms = property(fdel=clear_event_id_terms)
@abc.abstractmethod
def supports_event_query(self):
"""Tests if an ``EventQuery`` is available.
:return: ``true`` if an event query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_event_query(self):
"""Gets the query for an event.
Multiple retrievals produce a nested ``OR`` term.
:return: the event query
:rtype: ``osid.calendaring.EventQuery``
:raise: ``Unimplemented`` -- ``supports_event_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_event_query()`` is ``true``.*
"""
return # osid.calendaring.EventQuery
event_query = property(fget=get_event_query)
@abc.abstractmethod
def clear_event_terms(self):
"""Clears the event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
event_terms = property(fdel=clear_event_terms)
@abc.abstractmethod
def match_resource_id(self, resource_id, match):
"""Sets the resource ``Id`` for this query.
:param resource_id: a resource ``Id``
:type resource_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``resource_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_resource_id_terms(self):
"""Clears the resource ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
resource_id_terms = property(fdel=clear_resource_id_terms)
@abc.abstractmethod
def supports_resource_query(self):
"""Tests if a ``ResourceQuery`` is available for querying resources.
:return: ``true`` if a resource query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_resource_query(self):
"""Gets the query for a resource.
Multiple retrievals produce a nested ``OR`` term.
:return: the resource query
:rtype: ``osid.resource.ResourceQuery``
:raise: ``Unimplemented`` -- ``supports_resource_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_resource_query()`` is ``true``.*
"""
return # osid.resource.ResourceQuery
resource_query = property(fget=get_resource_query)
@abc.abstractmethod
def clear_resource_terms(self):
"""Clears the resource terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
resource_terms = property(fdel=clear_resource_terms)
@abc.abstractmethod
def match_calendar_id(self, calendar_id, match):
"""Sets the calendar ``Id`` for this query.
:param calendar_id: a calendar ``Id``
:type calendar_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``calendar_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_calendar_id_terms(self):
"""Clears the calendar ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_id_terms = property(fdel=clear_calendar_id_terms)
@abc.abstractmethod
def supports_calendar_query(self):
"""Tests if a ``CalendarQuery`` is available for querying resources.
:return: ``true`` if a calendar query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_calendar_query(self):
"""Gets the query for a calendar.
Multiple retrievals produce a nested ``OR`` term.
:return: the calendar query
:rtype: ``osid.calendaring.CalendarQuery``
:raise: ``Unimplemented`` -- ``supports_calendar_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_calendar_query()`` is ``true``.*
"""
return # osid.calendaring.CalendarQuery
calendar_query = property(fget=get_calendar_query)
@abc.abstractmethod
def clear_calendar_terms(self):
"""Clears the calendar terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
calendar_terms = property(fdel=clear_calendar_terms)
@abc.abstractmethod
def get_commitment_query_record(self, commitment_record_type):
"""Gets the commitment query record corresponding to the given ``Commitment`` record ``Type``.
Multiple retrievals produce a nested ``OR`` term.
:param commitment_record_type: a commitment query record type
:type commitment_record_type: ``osid.type.Type``
:return: the commitment query record
:rtype: ``osid.calendaring.records.CommitmentQueryRecord``
:raise: ``NullArgument`` -- ``commitment_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unsupported`` -- ``has_record_type(commitment_record_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.records.CommitmentQueryRecord
class CalendarQuery:
"""This is the query for searching calendars.
Each method specifies an ``AND`` term while multiple invocations of
the same method produce a nested ``OR``.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def match_event_id(self, event_id, match):
"""Sets the event ``Id`` for this query.
:param event_id: an event ``Id``
:type event_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``event_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_event_id_terms(self):
"""Clears the event ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
event_id_terms = property(fdel=clear_event_id_terms)
@abc.abstractmethod
def supports_event_query(self):
"""Tests if an ``EventQuery`` is available.
:return: ``true`` if an event query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_event_query(self):
"""Gets the query for an event.
Multiple retrievals produce a nested ``OR`` term.
:return: the event query
:rtype: ``osid.calendaring.EventQuery``
:raise: ``Unimplemented`` -- ``supports_event_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_event_query()`` is ``true``.*
"""
return # osid.calendaring.EventQuery
event_query = property(fget=get_event_query)
@abc.abstractmethod
def match_any_event(self, match):
"""Matches a calendar that has any event assigned.
:param match: ``true`` to match calendars with any event, ``false`` to match calendars with no events
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_event_terms(self):
"""Clears the event terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
event_terms = property(fdel=clear_event_terms)
@abc.abstractmethod
def match_time_period_id(self, time_period_id, match):
"""Sets the time period ``Id`` for this query.
:param time_period_id: a time period ``Id``
:type time_period_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``time_period_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_time_period_id_terms(self):
"""Clears the time period ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
time_period_id_terms = property(fdel=clear_time_period_id_terms)
@abc.abstractmethod
def supports_time_period_query(self):
"""Tests if a ``TimePeriodQuery`` is available.
:return: ``true`` if a time period query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_time_period_query(self):
"""Gets the query for a time period.
Multiple retrievals produce a nested ``OR`` term.
:return: the time period query
:rtype: ``osid.calendaring.TimePeriodQuery``
:raise: ``Unimplemented`` -- ``supports_time_period_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_time_period_query()`` is ``true``.*
"""
return # osid.calendaring.TimePeriodQuery
time_period_query = property(fget=get_time_period_query)
@abc.abstractmethod
def match_any_time_period(self, match):
"""Matches a calendar that has any time period assigned.
:param match: ``true`` to match calendars with any time period, ``false`` to match calendars with no time
periods
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_time_period_terms(self):
"""Clears the time period terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
time_period_terms = property(fdel=clear_time_period_terms)
@abc.abstractmethod
def match_commitment_id(self, commitment_id, match):
"""Sets the commitment ``Id`` for this query.
:param commitment_id: a commitment ``Id``
:type commitment_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``commitment_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_commitment_id_terms(self):
"""Clears the commitment ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
commitment_id_terms = property(fdel=clear_commitment_id_terms)
@abc.abstractmethod
def supports_commitment_query(self):
"""Tests if a ``CommitmentQuery`` is available.
:return: ``true`` if a commitment query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_commitment_query(self):
"""Gets the query for a commitment.
Multiple retrievals produce a nested ``OR`` term.
:return: the commitment query
:rtype: ``osid.calendaring.CommitmentQuery``
:raise: ``Unimplemented`` -- ``supports_commitment_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_commitment_query()`` is ``true``.*
"""
return # osid.calendaring.CommitmentQuery
commitment_query = property(fget=get_commitment_query)
@abc.abstractmethod
def match_any_commitment(self, match):
"""Matches a calendar that has any event commitment.
:param match: ``true`` to match calendars with any commitment, ``false`` to match calendars with no commitments
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_commitment_terms(self):
"""Clears the commitment terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
commitment_terms = property(fdel=clear_commitment_terms)
@abc.abstractmethod
def match_ancestor_calendar_id(self, calendar_id, match):
"""Sets the calendar ``Id`` for this query to match calendars that have the specified calendar as an ancestor.
:param calendar_id: a calendar ``Id``
:type calendar_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``calendar_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_ancestor_calendar_id_terms(self):
"""Clears the ancestor calendar ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
ancestor_calendar_id_terms = property(fdel=clear_ancestor_calendar_id_terms)
@abc.abstractmethod
def supports_ancestor_calendar_query(self):
"""Tests if a ``CalendarQuery`` is available.
:return: ``true`` if a calendar query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_ancestor_calendar_query(self):
"""Gets the query for a calendar.
Multiple retrievals produce a nested ``OR`` term.
:return: the calendar query
:rtype: ``osid.calendaring.CalendarQuery``
:raise: ``Unimplemented`` -- ``supports_ancestor_calendar_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_ancestor_calendar_query()`` is ``true``.*
"""
return # osid.calendaring.CalendarQuery
ancestor_calendar_query = property(fget=get_ancestor_calendar_query)
@abc.abstractmethod
def match_any_ancestor_calendar(self, match):
"""Matches a calendar that has any ancestor.
:param match: ``true`` to match calendars with any ancestor, ``false`` to match root calendars
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_ancestor_calendar_terms(self):
"""Clears the ancestor calendar terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
ancestor_calendar_terms = property(fdel=clear_ancestor_calendar_terms)
@abc.abstractmethod
def match_descendant_calendar_id(self, calendar_id, match):
"""Sets the calendar ``Id`` for this query to match calendars that have the specified calendar as a descendant.
:param calendar_id: a calendar ``Id``
:type calendar_id: ``osid.id.Id``
:param match: ``true`` for a positive match, ``false`` for a negative match
:type match: ``boolean``
:raise: ``NullArgument`` -- ``calendar_id`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_descendant_calendar_id_terms(self):
"""Clears the descendant calendar ``Id`` terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
descendant_calendar_id_terms = property(fdel=clear_descendant_calendar_id_terms)
@abc.abstractmethod
def supports_descendant_calendar_query(self):
"""Tests if a ``CalendarQuery``.
:return: ``true`` if a calendar query is available, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_descendant_calendar_query(self):
"""Gets the query for a calendar.
Multiple retrievals produce a nested ``OR`` term.
:return: the calendar query
:rtype: ``osid.calendaring.CalendarQuery``
:raise: ``Unimplemented`` -- ``supports_descendant_calendar_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_descendant_calendar_query()`` is ``true``.*
"""
return # osid.calendaring.CalendarQuery
descendant_calendar_query = property(fget=get_descendant_calendar_query)
@abc.abstractmethod
def match_any_descendant_calendar(self, match):
"""Matches a calendar that has any descendant.
:param match: ``true`` to match calendars with any descendant, ``false`` to match leaf calendars
:type match: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def clear_descendant_calendar_terms(self):
"""Clears the descendant calendar terms.
*compliance: mandatory -- This method must be implemented.*
"""
pass
descendant_calendar_terms = property(fdel=clear_descendant_calendar_terms)
@abc.abstractmethod
def get_calendar_query_record(self, calendar_record_type):
"""Gets the calendar query record corresponding to the given ``Calendar`` record ``Type``.
Multiple record retrievals produce a nested ``OR`` term.
:param calendar_record_type: a calendar record type
:type calendar_record_type: ``osid.type.Type``
:return: the calendar query record
:rtype: ``osid.calendaring.records.CalendarQueryRecord``
:raise: ``NullArgument`` -- ``calendar_record_type`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unsupported`` -- ``has_record_type(calendar_record_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.records.CalendarQueryRecord
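# --- Illustrative sketch (assumptions only) ----------------------------------
# One way a hypothetical caller might combine the CalendarQuery terms above to
# find calendars beneath a given root that carry at least one commitment.  The
# query object, the Id value, and the query session with its
# get_calendars_by_query() method are placeholders assumed for this sketch.
def _find_committed_descendants(calendar_query, root_calendar_id, query_session):
    calendar_query.match_ancestor_calendar_id(root_calendar_id, True)  # AND term
    calendar_query.match_any_commitment(True)                          # AND term
    return query_session.get_calendars_by_query(calendar_query)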
| 28.934574
| 120
| 0.632409
| 15,432
| 138,423
| 5.531752
| 0.017172
| 0.064124
| 0.07544
| 0.060352
| 0.936825
| 0.903744
| 0.866059
| 0.830975
| 0.808483
| 0.78994
| 0
| 0
| 0.257898
| 138,423
| 4,783
| 121
| 28.940623
| 0.831023
| 0.598289
| 0
| 0.797535
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.283451
| false
| 0.208627
| 0.00088
| 0
| 0.507923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
0ce0840b66e590ef2a41c729b631412a225153c7
| 12,383
|
py
|
Python
|
test/unit/agent/common/util/text.py
|
dp92987/nginx-amplify-agent
|
1b2eed6eab52a82f35974928d75044451b4bedaf
|
[
"BSD-2-Clause"
] | 308
|
2015-11-17T13:15:33.000Z
|
2022-03-24T12:03:40.000Z
|
test/unit/agent/common/util/text.py
|
dp92987/nginx-amplify-agent
|
1b2eed6eab52a82f35974928d75044451b4bedaf
|
[
"BSD-2-Clause"
] | 211
|
2015-11-16T15:27:41.000Z
|
2022-03-28T16:20:15.000Z
|
test/unit/agent/common/util/text.py
|
dp92987/nginx-amplify-agent
|
1b2eed6eab52a82f35974928d75044451b4bedaf
|
[
"BSD-2-Clause"
] | 80
|
2015-11-16T18:20:30.000Z
|
2022-03-02T12:47:56.000Z
|
# -*- coding: utf-8 -*-
from hamcrest import *
from test.base import BaseTestCase
from amplify.agent.common.util.text import (
decompose_format, parse_line, parse_line_split
)
__author__ = "Grant Hulegaard"
__copyright__ = "Copyright (C) Nginx, Inc. All rights reserved."
__license__ = ""
__maintainer__ = "Grant Hulegaard"
__email__ = "grant.hulegaard@nginx.com"
COMBINED_FORMAT = '$remote_addr - $remote_user [$time_local] "$request" ' + \
'$status $body_bytes_sent "$http_referer" "$http_user_agent"'
class UtilTextTestCase(BaseTestCase):
def test_decompose_format_regular(self):
keys, trie, non_key_patterns, first_value_is_key = decompose_format(
COMBINED_FORMAT, full=True
)
assert_that(keys, not_none())
assert_that(trie, not_none())
assert_that(non_key_patterns, not_none())
assert_that(first_value_is_key, equal_to(True))
assert_that(keys, equal_to([
'remote_addr', 'remote_user', 'time_local', 'request', 'status',
'body_bytes_sent', 'http_referer', 'http_user_agent'
]))
assert_that(non_key_patterns, equal_to([
' - ', ' [', '] "', '" ', ' ', ' "', '" "', '"'
]))
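# Illustrative sketch (not part of the original tests): given the keys and
# non-key patterns asserted above, the combined format can be rebuilt by
# interleaving '$' + key with the literal pattern that follows it, because the
# format begins with a key:
#
#     parts = []
#     for key, pattern in zip(keys, non_key_patterns):
#         parts.append('$' + key)
#         parts.append(pattern)
#     assert ''.join(parts) == COMBINED_FORMAT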
def test_decompose_format_different(self):
log_format = '$remote_addr - $remote_user [$time_local] ' + \
'"$request" $status $body_bytes_sent "$http_referer" ' + \
'"$http_user_agent" rt=$request_time ' + \
'ut="$upstream_response_time" cs=$upstream_cache_status'
keys, trie, non_key_patterns, first_value_is_key = decompose_format(log_format, full=True)
assert_that(keys, not_none())
assert_that(trie, not_none())
assert_that(non_key_patterns, not_none())
assert_that(first_value_is_key, equal_to(True))
assert_that(keys, equal_to([
'remote_addr', 'remote_user', 'time_local', 'request', 'status',
'body_bytes_sent', 'http_referer', 'http_user_agent',
'request_time', 'upstream_response_time', 'upstream_cache_status'
]))
assert_that(non_key_patterns, equal_to([
' - ', ' [', '] "', '" ', ' ', ' "', '" "', '" rt=', ' ut="',
'" cs='
]))
def test_parse_line(self):
keys, trie = decompose_format(COMBINED_FORMAT)
line = '127.0.0.1 - - [02/Jul/2015:14:49:48 +0000] "GET /basic_status HTTP/1.1" 200 110 "-" ' + \
'"python-requests/2.2.1 CPython/2.7.6 Linux/3.13.0-48-generic"'
results = parse_line(line, keys=keys, trie=trie)
assert_that(results, not_none())
for key in keys:
assert_that(results, has_item(key))
assert_that(results[key], not_none())
# check the last value to make sure complete parse
assert_that(results['http_user_agent'], equal_to(
'python-requests/2.2.1 CPython/2.7.6 Linux/3.13.0-48-generic'
))
def test_parse_line_split(self):
keys, _, non_key_patterns, first_value_is_key = decompose_format(COMBINED_FORMAT, full=True)
line = '127.0.0.1 - - [02/Jul/2015:14:49:48 +0000] "GET /basic_status HTTP/1.1" 200 110 "-" ' + \
'"python-requests/2.2.1 CPython/2.7.6 Linux/3.13.0-48-generic"'
results = parse_line_split(
line,
keys=keys,
non_key_patterns=non_key_patterns,
first_value_is_key=first_value_is_key
)
assert_that(results, not_none())
for key in keys:
assert_that(results, has_item(key))
assert_that(results[key], not_none())
# check the last value to make sure complete parse
assert_that(results['http_user_agent'], equal_to(
'python-requests/2.2.1 CPython/2.7.6 Linux/3.13.0-48-generic'
))
def test_parse_line_non_standard_http_method(self):
keys, trie = decompose_format(COMBINED_FORMAT)
line = '127.0.0.1 - - [02/Jul/2015:14:49:48 +0000] "PROPFIND /basic_status HTTP/1.1" 200 110 "-" ' + \
'"python-requests/2.2.1 CPython/2.7.6 Linux/3.13.0-48-generic"'
results = parse_line(line, keys=keys, trie=trie)
assert_that(results, not_none())
for key in keys:
assert_that(results, has_item(key))
assert_that(results[key], not_none())
# check the last value to make sure complete parse
assert_that(results['http_user_agent'], equal_to(
'python-requests/2.2.1 CPython/2.7.6 Linux/3.13.0-48-generic'
))
def test_parse_line_split_non_standard_http_method(self):
keys, _, non_key_patterns, first_value_is_key = decompose_format(
COMBINED_FORMAT, full=True
)
line = '127.0.0.1 - - [02/Jul/2015:14:49:48 +0000] "PROPFIND /basic_status HTTP/1.1" 200 110 "-" ' + \
'"python-requests/2.2.1 CPython/2.7.6 Linux/3.13.0-48-generic"'
results = parse_line_split(
line,
keys=keys,
non_key_patterns=non_key_patterns,
first_value_is_key=first_value_is_key
)
assert_that(results, not_none())
for key in keys:
assert_that(results, has_item(key))
assert_that(results[key], not_none())
# check the last value to make sure complete parse
assert_that(results['http_user_agent'], equal_to(
'python-requests/2.2.1 CPython/2.7.6 Linux/3.13.0-48-generic'
))
def test_parse_line_upstream_log_format(self):
log_format = '$remote_addr - $remote_user [$time_local] ' + \
'"$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" ' + \
'rt=$request_time ut="$upstream_response_time" cs=$upstream_cache_status'
keys, trie = decompose_format(log_format)
line = \
'1.2.3.4 - - [22/Jan/2010:19:34:21 +0300] "GET /foo/ HTTP/1.1" 200 11078 ' + \
'"http://www.rambler.ru/" "Mozilla/5.0 (Windows; U; Windows NT 5.1" rt=0.010 ut="2.001, 0.345" cs=MISS'
results = parse_line(line, keys=keys, trie=trie)
assert_that(results, not_none())
for key in keys:
assert_that(results, has_item(key))
assert_that(results[key], not_none())
# check the last value to make sure complete parse
assert_that(results['upstream_cache_status'], equal_to('MISS'))
# check some complicated values
assert_that(results['request_time'], equal_to('0.010'))
assert_that(results['upstream_response_time'], equal_to('2.001, 0.345'))
def test_parse_line_split_upstream_log_format(self):
log_format = '$remote_addr - $remote_user [$time_local] ' + \
'"$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" ' + \
'rt=$request_time ut="$upstream_response_time" cs=$upstream_cache_status'
keys, _, non_key_patterns, first_value_is_key = decompose_format(log_format, full=True)
line = \
'1.2.3.4 - - [22/Jan/2010:19:34:21 +0300] "GET /foo/ HTTP/1.1" 200 11078 ' + \
'"http://www.rambler.ru/" "Mozilla/5.0 (Windows; U; Windows NT 5.1" rt=0.010 ut="2.001, 0.345" cs=MISS'
results = parse_line_split(
line,
keys=keys,
non_key_patterns=non_key_patterns,
first_value_is_key=first_value_is_key
)
assert_that(results, not_none())
for key in keys:
assert_that(results, has_item(key))
assert_that(results[key], not_none())
# check the last value to make sure complete parse
assert_that(results['upstream_cache_status'], equal_to('MISS'))
# check some complicated values
assert_that(results['request_time'], equal_to('0.010'))
assert_that(results['upstream_response_time'], equal_to('2.001, 0.345'))
def test_parse_line_upstream_log_format_empty_upstreams(self):
log_format = '$remote_addr - $remote_user [$time_local] ' + \
'"$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" ' + \
'rt=$request_time cs=$upstream_cache_status ut="$upstream_response_time"'
keys, trie = decompose_format(log_format)
line = \
'1.2.3.4 - - [22/Jan/2010:19:34:21 +0300] "GET /foo/ HTTP/1.1" 200 11078 ' + \
'"http://www.rambler.ru/" "Mozilla/5.0 (Windows; U; Windows NT 5.1" rt=0.010 cs=- ut="-"'
results = parse_line(line, keys=keys, trie=trie)
assert_that(results, not_none())
for key in keys:
assert_that(results, has_item(key))
assert_that(results[key], not_none())
# check the last value to make sure complete parse
assert_that(results['upstream_response_time'], equal_to('-'))
assert_that(results['upstream_cache_status'], equal_to('-'))
def test_parse_line_split_upstream_log_format_empty_upstreams(self):
log_format = '$remote_addr - $remote_user [$time_local] ' + \
'"$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" ' + \
'rt=$request_time cs=$upstream_cache_status ut="$upstream_response_time"'
keys, _, non_key_patterns, first_value_is_key = decompose_format(
log_format, full=True
)
line = \
'1.2.3.4 - - [22/Jan/2010:19:34:21 +0300] "GET /foo/ HTTP/1.1" 200 11078 ' + \
'"http://www.rambler.ru/" "Mozilla/5.0 (Windows; U; Windows NT 5.1" rt=0.010 cs=- ut="-"'
results = parse_line_split(
line,
keys=keys,
non_key_patterns=non_key_patterns,
first_value_is_key=first_value_is_key
)
assert_that(results, not_none())
for key in keys:
assert_that(results, has_item(key))
assert_that(results[key], not_none())
# check the last value to make sure complete parse
assert_that(results['upstream_response_time'], equal_to('-'))
assert_that(results['upstream_cache_status'], equal_to('-'))
def test_parse_line_upstream_log_format_part_empty_upstreams(self):
log_format = '$remote_addr - $remote_user [$time_local] ' + \
'"$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" ' + \
'rt=$request_time ut="$upstream_response_time" cs=$upstream_cache_status'
keys, trie = decompose_format(log_format)
line = \
'1.2.3.4 - - [22/Jan/2010:19:34:21 +0300] "GET /foo/ HTTP/1.1" 200 11078 ' + \
'"http://www.rambler.ru/" "Mozilla/5.0 (Windows; U; Windows NT 5.1" rt=0.010 ut="-" cs=MISS'
results = parse_line(line, keys=keys, trie=trie)
assert_that(results, not_none())
for key in keys:
assert_that(results, has_item(key))
assert_that(results[key], not_none())
# check the last value to make sure complete parse
assert_that(results['upstream_cache_status'], equal_to('MISS'))
def test_parse_line_split_upstream_log_format_part_empty_upstreams(self):
log_format = '$remote_addr - $remote_user [$time_local] ' + \
'"$request" $status $body_bytes_sent "$http_referer" "$http_user_agent" ' + \
'rt=$request_time ut="$upstream_response_time" cs=$upstream_cache_status'
keys, _, non_key_patterns, first_value_is_key = decompose_format(log_format, full=True)
line = \
'1.2.3.4 - - [22/Jan/2010:19:34:21 +0300] "GET /foo/ HTTP/1.1" 200 11078 ' + \
'"http://www.rambler.ru/" "Mozilla/5.0 (Windows; U; Windows NT 5.1" rt=0.010 ut="-" cs=MISS'
results = parse_line_split(
line,
keys=keys,
non_key_patterns=non_key_patterns,
first_value_is_key=first_value_is_key
)
assert_that(results, not_none())
for key in keys:
assert_that(results, has_item(key))
assert_that(results[key], not_none())
# check the last value to make sure complete parse
assert_that(results['upstream_cache_status'], equal_to('MISS'))
| 42.407534
| 115
| 0.605023
| 1,650
| 12,383
| 4.230303
| 0.085455
| 0.083095
| 0.112034
| 0.040831
| 0.938252
| 0.938252
| 0.932235
| 0.923352
| 0.918911
| 0.918911
| 0
| 0.053244
| 0.264395
| 12,383
| 291
| 116
| 42.553265
| 0.713031
| 0.046112
| 0
| 0.785047
| 0
| 0.11215
| 0.334322
| 0.10367
| 0
| 0
| 0
| 0
| 0.271028
| 1
| 0.056075
| false
| 0
| 0.014019
| 0
| 0.074766
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ce9ddf8982fdd13b64038e356850186f884758e
| 4,462
|
py
|
Python
|
go/apps/http_api/tests/test_views.py
|
lynnUg/vumi-go
|
852f906c46d5d26940bd6699f11488b73bbc3742
|
[
"BSD-3-Clause"
] | null | null | null |
go/apps/http_api/tests/test_views.py
|
lynnUg/vumi-go
|
852f906c46d5d26940bd6699f11488b73bbc3742
|
[
"BSD-3-Clause"
] | null | null | null |
go/apps/http_api/tests/test_views.py
|
lynnUg/vumi-go
|
852f906c46d5d26940bd6699f11488b73bbc3742
|
[
"BSD-3-Clause"
] | null | null | null |
from go.apps.tests.view_helpers import AppViewsHelper
from go.base.tests.helpers import GoDjangoTestCase
class TestHttpApiViews(GoDjangoTestCase):
def setUp(self):
self.app_helper = self.add_helper(AppViewsHelper(u'http_api'))
self.client = self.app_helper.get_client()
def test_show_stopped(self):
"""
Test showing the conversation
"""
conv_helper = self.app_helper.create_conversation_helper(
name=u"myconv")
response = self.client.get(conv_helper.get_view_url('show'))
self.assertContains(response, u"<h1>myconv</h1>")
def test_show_running(self):
"""
Test showing the conversation
"""
conv_helper = self.app_helper.create_conversation_helper(
name=u"myconv", started=True)
response = self.client.get(conv_helper.get_view_url('show'))
self.assertContains(response, u"<h1>myconv</h1>")
def test_edit_view(self):
conv_helper = self.app_helper.create_conversation_helper()
conversation = conv_helper.get_conversation()
self.assertEqual(conversation.config, {})
response = self.client.post(conv_helper.get_view_url('edit'), {
'http_api-api_tokens': 'token',
'http_api-push_message_url': 'http://messages/',
'http_api-push_event_url': 'http://events/',
'http_api-metric_store': 'foo_metric_store',
}, follow=True)
self.assertRedirects(response, conv_helper.get_view_url('show'))
reloaded_conv = conv_helper.get_conversation()
self.assertEqual(reloaded_conv.config, {
'http_api': {
'push_event_url': 'http://events/',
'push_message_url': 'http://messages/',
'api_tokens': ['token'],
'metric_store': 'foo_metric_store',
'ignore_events': False,
'ignore_messages': False,
}
})
def test_edit_view_no_event_url(self):
conv_helper = self.app_helper.create_conversation_helper()
conversation = conv_helper.get_conversation()
self.assertEqual(conversation.config, {})
response = self.client.post(conv_helper.get_view_url('edit'), {
'http_api-api_tokens': 'token',
'http_api-push_message_url': 'http://messages/',
'http_api-push_event_url': '',
'http_api-metric_store': 'foo_metric_store',
})
self.assertRedirects(response, conv_helper.get_view_url('show'))
reloaded_conv = conv_helper.get_conversation()
self.assertEqual(reloaded_conv.config, {
'http_api': {
'push_event_url': None,
'push_message_url': 'http://messages/',
'api_tokens': ['token'],
'metric_store': 'foo_metric_store',
'ignore_events': False,
'ignore_messages': False,
}
})
self.assertEqual(conversation.config, {})
response = self.client.get(conv_helper.get_view_url('edit'))
self.assertContains(response, 'http://messages/')
self.assertContains(response, 'foo_metric_store')
self.assertEqual(response.status_code, 200)
def test_edit_view_no_push_urls(self):
conv_helper = self.app_helper.create_conversation_helper()
conversation = conv_helper.get_conversation()
self.assertEqual(conversation.config, {})
response = self.client.post(conv_helper.get_view_url('edit'), {
'http_api-api_tokens': 'token',
'http_api-push_message_url': '',
'http_api-push_event_url': '',
'http_api-metric_store': 'foo_metric_store',
})
self.assertRedirects(response, conv_helper.get_view_url('show'))
reloaded_conv = conv_helper.get_conversation()
self.assertEqual(reloaded_conv.config, {
'http_api': {
'push_event_url': None,
'push_message_url': None,
'api_tokens': ['token'],
'metric_store': 'foo_metric_store',
'ignore_events': False,
'ignore_messages': False,
}
})
self.assertEqual(conversation.config, {})
response = self.client.get(conv_helper.get_view_url('edit'))
self.assertContains(response, 'foo_metric_store')
self.assertEqual(response.status_code, 200)
| 40.93578
| 72
| 0.61385
| 482
| 4,462
| 5.340249
| 0.134855
| 0.081585
| 0.080808
| 0.066045
| 0.879953
| 0.866744
| 0.866744
| 0.848096
| 0.848096
| 0.848096
| 0
| 0.003042
| 0.263335
| 4,462
| 108
| 73
| 41.314815
| 0.780043
| 0.013223
| 0
| 0.758242
| 0
| 0
| 0.202938
| 0.047521
| 0
| 0
| 0
| 0
| 0.197802
| 1
| 0.065934
| false
| 0
| 0.021978
| 0
| 0.098901
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0b519f8596f5bf7ee53103adc8d550ce1fb62540
| 68,172
|
py
|
Python
|
tests/test_generate_unique_id_function.py
|
ssensalo/fastapi
|
146f57b8f70c5757dc20edc716dba1b96936a8d6
|
[
"MIT"
] | 1
|
2022-01-08T16:39:28.000Z
|
2022-01-08T16:39:28.000Z
|
tests/test_generate_unique_id_function.py
|
ssensalo/fastapi
|
146f57b8f70c5757dc20edc716dba1b96936a8d6
|
[
"MIT"
] | 1
|
2022-01-07T21:04:04.000Z
|
2022-01-07T21:04:04.000Z
|
tests/test_generate_unique_id_function.py
|
ssensalo/fastapi
|
146f57b8f70c5757dc20edc716dba1b96936a8d6
|
[
"MIT"
] | null | null | null |
import warnings
from typing import List
from fastapi import APIRouter, FastAPI
from fastapi.routing import APIRoute
from fastapi.testclient import TestClient
from pydantic import BaseModel
def custom_generate_unique_id(route: APIRoute):
return f"foo_{route.name}"
def custom_generate_unique_id2(route: APIRoute):
return f"bar_{route.name}"
def custom_generate_unique_id3(route: APIRoute):
return f"baz_{route.name}"
class Item(BaseModel):
name: str
price: float
class Message(BaseModel):
title: str
description: str
def test_top_level_generate_unique_id():
app = FastAPI(generate_unique_id_function=custom_generate_unique_id)
router = APIRouter()
@app.post("/", response_model=List[Item], responses={404: {"model": List[Message]}})
def post_root(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
@router.post(
"/router", response_model=List[Item], responses={404: {"model": List[Message]}}
)
def post_router(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
app.include_router(router)
client = TestClient(app)
response = client.get("/openapi.json")
data = response.json()
assert data == {
"openapi": "3.0.2",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/": {
"post": {
"summary": "Post Root",
"operationId": "foo_post_root",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_foo_post_root"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Foo Post Root",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Foo Post Root",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
"/router": {
"post": {
"summary": "Post Router",
"operationId": "foo_post_router",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_foo_post_router"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Foo Post Router",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Foo Post Router",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
},
"components": {
"schemas": {
"Body_foo_post_root": {
"title": "Body_foo_post_root",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"Body_foo_post_router": {
"title": "Body_foo_post_router",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
"Item": {
"title": "Item",
"required": ["name", "price"],
"type": "object",
"properties": {
"name": {"title": "Name", "type": "string"},
"price": {"title": "Price", "type": "number"},
},
},
"Message": {
"title": "Message",
"required": ["title", "description"],
"type": "object",
"properties": {
"title": {"title": "Title", "type": "string"},
"description": {"title": "Description", "type": "string"},
},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"anyOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
}
},
}
def test_router_overrides_generate_unique_id():
app = FastAPI(generate_unique_id_function=custom_generate_unique_id)
router = APIRouter(generate_unique_id_function=custom_generate_unique_id2)
@app.post("/", response_model=List[Item], responses={404: {"model": List[Message]}})
def post_root(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
@router.post(
"/router", response_model=List[Item], responses={404: {"model": List[Message]}}
)
def post_router(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
app.include_router(router)
client = TestClient(app)
response = client.get("/openapi.json")
data = response.json()
assert data == {
"openapi": "3.0.2",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/": {
"post": {
"summary": "Post Root",
"operationId": "foo_post_root",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_foo_post_root"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Foo Post Root",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Foo Post Root",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
"/router": {
"post": {
"summary": "Post Router",
"operationId": "bar_post_router",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_bar_post_router"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Bar Post Router",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Bar Post Router",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
},
"components": {
"schemas": {
"Body_bar_post_router": {
"title": "Body_bar_post_router",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"Body_foo_post_root": {
"title": "Body_foo_post_root",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
"Item": {
"title": "Item",
"required": ["name", "price"],
"type": "object",
"properties": {
"name": {"title": "Name", "type": "string"},
"price": {"title": "Price", "type": "number"},
},
},
"Message": {
"title": "Message",
"required": ["title", "description"],
"type": "object",
"properties": {
"title": {"title": "Title", "type": "string"},
"description": {"title": "Description", "type": "string"},
},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"anyOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
}
},
}
def test_router_include_overrides_generate_unique_id():
app = FastAPI(generate_unique_id_function=custom_generate_unique_id)
router = APIRouter(generate_unique_id_function=custom_generate_unique_id2)
@app.post("/", response_model=List[Item], responses={404: {"model": List[Message]}})
def post_root(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
@router.post(
"/router", response_model=List[Item], responses={404: {"model": List[Message]}}
)
def post_router(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
app.include_router(router, generate_unique_id_function=custom_generate_unique_id3)
client = TestClient(app)
response = client.get("/openapi.json")
data = response.json()
assert data == {
"openapi": "3.0.2",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/": {
"post": {
"summary": "Post Root",
"operationId": "foo_post_root",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_foo_post_root"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Foo Post Root",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Foo Post Root",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
"/router": {
"post": {
"summary": "Post Router",
"operationId": "bar_post_router",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_bar_post_router"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Bar Post Router",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Bar Post Router",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
},
"components": {
"schemas": {
"Body_bar_post_router": {
"title": "Body_bar_post_router",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"Body_foo_post_root": {
"title": "Body_foo_post_root",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
"Item": {
"title": "Item",
"required": ["name", "price"],
"type": "object",
"properties": {
"name": {"title": "Name", "type": "string"},
"price": {"title": "Price", "type": "number"},
},
},
"Message": {
"title": "Message",
"required": ["title", "description"],
"type": "object",
"properties": {
"title": {"title": "Title", "type": "string"},
"description": {"title": "Description", "type": "string"},
},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"anyOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
}
},
}
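# With a plain intermediate router, the include-time function applies to its routes
# ("baz_post_router"), while a sub-router that defines its own function keeps it
# ("bar_post_subrouter"); the app-level function still covers routes registered on the
# app itself ("foo_post_root").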
def test_subrouter_top_level_include_overrides_generate_unique_id():
app = FastAPI(generate_unique_id_function=custom_generate_unique_id)
router = APIRouter()
sub_router = APIRouter(generate_unique_id_function=custom_generate_unique_id2)
@app.post("/", response_model=List[Item], responses={404: {"model": List[Message]}})
def post_root(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
@router.post(
"/router", response_model=List[Item], responses={404: {"model": List[Message]}}
)
def post_router(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
@sub_router.post(
"/subrouter",
response_model=List[Item],
responses={404: {"model": List[Message]}},
)
def post_subrouter(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
router.include_router(sub_router)
app.include_router(router, generate_unique_id_function=custom_generate_unique_id3)
client = TestClient(app)
response = client.get("/openapi.json")
data = response.json()
assert data == {
"openapi": "3.0.2",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/": {
"post": {
"summary": "Post Root",
"operationId": "foo_post_root",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_foo_post_root"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Foo Post Root",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Foo Post Root",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
"/router": {
"post": {
"summary": "Post Router",
"operationId": "baz_post_router",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_baz_post_router"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Baz Post Router",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Baz Post Router",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
"/subrouter": {
"post": {
"summary": "Post Subrouter",
"operationId": "bar_post_subrouter",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_bar_post_subrouter"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Bar Post Subrouter",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Bar Post Subrouter",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
},
"components": {
"schemas": {
"Body_bar_post_subrouter": {
"title": "Body_bar_post_subrouter",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"Body_baz_post_router": {
"title": "Body_baz_post_router",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"Body_foo_post_root": {
"title": "Body_foo_post_root",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
"Item": {
"title": "Item",
"required": ["name", "price"],
"type": "object",
"properties": {
"name": {"title": "Name", "type": "string"},
"price": {"title": "Price", "type": "number"},
},
},
"Message": {
"title": "Message",
"required": ["title", "description"],
"type": "object",
"properties": {
"title": {"title": "Title", "type": "string"},
"description": {"title": "Description", "type": "string"},
},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"anyOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
}
},
}
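# A generate_unique_id_function passed to the path operation decorator wins over the
# router-level one: "/router" below is documented as "baz_post_router" rather than
# "bar_post_router".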
def test_router_path_operation_overrides_generate_unique_id():
app = FastAPI(generate_unique_id_function=custom_generate_unique_id)
router = APIRouter(generate_unique_id_function=custom_generate_unique_id2)
@app.post("/", response_model=List[Item], responses={404: {"model": List[Message]}})
def post_root(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
@router.post(
"/router",
response_model=List[Item],
responses={404: {"model": List[Message]}},
generate_unique_id_function=custom_generate_unique_id3,
)
def post_router(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
app.include_router(router)
client = TestClient(app)
response = client.get("/openapi.json")
data = response.json()
assert data == {
"openapi": "3.0.2",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/": {
"post": {
"summary": "Post Root",
"operationId": "foo_post_root",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_foo_post_root"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Foo Post Root",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Foo Post Root",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
"/router": {
"post": {
"summary": "Post Router",
"operationId": "baz_post_router",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_baz_post_router"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Baz Post Router",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Baz Post Router",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
},
"components": {
"schemas": {
"Body_baz_post_router": {
"title": "Body_baz_post_router",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"Body_foo_post_root": {
"title": "Body_foo_post_root",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
"Item": {
"title": "Item",
"required": ["name", "price"],
"type": "object",
"properties": {
"name": {"title": "Name", "type": "string"},
"price": {"title": "Price", "type": "number"},
},
},
"Message": {
"title": "Message",
"required": ["title", "description"],
"type": "object",
"properties": {
"title": {"title": "Title", "type": "string"},
"description": {"title": "Description", "type": "string"},
},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"anyOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
}
},
}
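# The same precedence holds on the app object: the per-operation function gives
# "baz_post_root" for "/", overriding the app-level default, while the router route
# falls back to the router's own function ("bar_post_router").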
def test_app_path_operation_overrides_generate_unique_id():
app = FastAPI(generate_unique_id_function=custom_generate_unique_id)
router = APIRouter(generate_unique_id_function=custom_generate_unique_id2)
@app.post(
"/",
response_model=List[Item],
responses={404: {"model": List[Message]}},
generate_unique_id_function=custom_generate_unique_id3,
)
def post_root(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
@router.post(
"/router",
response_model=List[Item],
responses={404: {"model": List[Message]}},
)
def post_router(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
app.include_router(router)
client = TestClient(app)
response = client.get("/openapi.json")
data = response.json()
assert data == {
"openapi": "3.0.2",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/": {
"post": {
"summary": "Post Root",
"operationId": "baz_post_root",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_baz_post_root"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Baz Post Root",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Baz Post Root",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
"/router": {
"post": {
"summary": "Post Router",
"operationId": "bar_post_router",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_bar_post_router"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Bar Post Router",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Bar Post Router",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
},
"components": {
"schemas": {
"Body_bar_post_router": {
"title": "Body_bar_post_router",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"Body_baz_post_root": {
"title": "Body_baz_post_root",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
"Item": {
"title": "Item",
"required": ["name", "price"],
"type": "object",
"properties": {
"name": {"title": "Name", "type": "string"},
"price": {"title": "Price", "type": "number"},
},
},
"Message": {
"title": "Message",
"required": ["title", "description"],
"type": "object",
"properties": {
"title": {"title": "Title", "type": "string"},
"description": {"title": "Description", "type": "string"},
},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"anyOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
}
},
}
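# Callback routes follow the same rules: the per-operation override on the callback
# yields "baz_post_callback" in the generated docs, and a path operation without any
# explicit override falls back to the app-level function ("foo_post_with_callback").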
def test_callback_override_generate_unique_id():
app = FastAPI(generate_unique_id_function=custom_generate_unique_id)
callback_router = APIRouter(generate_unique_id_function=custom_generate_unique_id2)
@callback_router.post(
"/post-callback",
response_model=List[Item],
responses={404: {"model": List[Message]}},
generate_unique_id_function=custom_generate_unique_id3,
)
def post_callback(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
@app.post(
"/",
response_model=List[Item],
responses={404: {"model": List[Message]}},
generate_unique_id_function=custom_generate_unique_id3,
callbacks=callback_router.routes,
)
def post_root(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
@app.post(
"/tocallback",
response_model=List[Item],
responses={404: {"model": List[Message]}},
)
def post_with_callback(item1: Item, item2: Item):
return item1, item2 # pragma: nocover
client = TestClient(app)
response = client.get("/openapi.json")
data = response.json()
assert data == {
"openapi": "3.0.2",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/": {
"post": {
"summary": "Post Root",
"operationId": "baz_post_root",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_baz_post_root"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Baz Post Root",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Baz Post Root",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"callbacks": {
"post_callback": {
"/post-callback": {
"post": {
"summary": "Post Callback",
"operationId": "baz_post_callback",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_baz_post_callback"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Baz Post Callback",
"type": "array",
"items": {
"$ref": "#/components/schemas/Item"
},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Baz Post Callback",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
}
}
},
}
},
"/tocallback": {
"post": {
"summary": "Post With Callback",
"operationId": "foo_post_with_callback",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_foo_post_with_callback"
}
}
},
"required": True,
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"title": "Response Foo Post With Callback",
"type": "array",
"items": {"$ref": "#/components/schemas/Item"},
}
}
},
},
"404": {
"description": "Not Found",
"content": {
"application/json": {
"schema": {
"title": "Response 404 Foo Post With Callback",
"type": "array",
"items": {
"$ref": "#/components/schemas/Message"
},
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
}
},
},
"components": {
"schemas": {
"Body_baz_post_callback": {
"title": "Body_baz_post_callback",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"Body_baz_post_root": {
"title": "Body_baz_post_root",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"Body_foo_post_with_callback": {
"title": "Body_foo_post_with_callback",
"required": ["item1", "item2"],
"type": "object",
"properties": {
"item1": {"$ref": "#/components/schemas/Item"},
"item2": {"$ref": "#/components/schemas/Item"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
"Item": {
"title": "Item",
"required": ["name", "price"],
"type": "object",
"properties": {
"name": {"title": "Name", "type": "string"},
"price": {"title": "Price", "type": "number"},
},
},
"Message": {
"title": "Message",
"required": ["title", "description"],
"type": "object",
"properties": {
"title": {"title": "Title", "type": "string"},
"description": {"title": "Description", "type": "string"},
},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"anyOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
}
},
}
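# A generate_unique_id_function that returns the same id for every route should emit a
# UserWarning per duplicate: three routes sharing the id "foo" produce two warnings.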
def test_warn_duplicate_operation_id():
def broken_operation_id(route: APIRoute):
return "foo"
app = FastAPI(generate_unique_id_function=broken_operation_id)
@app.post("/")
def post_root(item1: Item):
return item1 # pragma: nocover
@app.post("/second")
def post_second(item1: Item):
return item1 # pragma: nocover
@app.post("/third")
def post_third(item1: Item):
return item1 # pragma: nocover
client = TestClient(app)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
client.get("/openapi.json")
assert len(w) == 2
assert issubclass(w[-1].category, UserWarning)
assert "Duplicate Operation ID" in str(w[-1].message)
| 41.772059
| 106
| 0.285161
| 3,252
| 68,172
| 5.851476
| 0.038438
| 0.098271
| 0.108256
| 0.094172
| 0.944401
| 0.940197
| 0.930948
| 0.930948
| 0.926743
| 0.925797
| 0
| 0.015787
| 0.597679
| 68,172
| 1,631
| 107
| 41.79767
| 0.678018
| 0.004445
| 0
| 0.646574
| 0
| 0
| 0.226547
| 0.049522
| 0
| 0
| 0
| 0
| 0.006345
| 1
| 0.01967
| false
| 0
| 0.003807
| 0.014594
| 0.041878
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0b5db17336f788ad1d51e0ebfedab480c4c72a7e
| 2,068
|
py
|
Python
|
quiz/models.py
|
jzi040941/django_quiz
|
465d29c74e3ff6814f686296d225f18a50c99b9a
|
[
"MIT"
] | 1
|
2018-03-14T16:43:00.000Z
|
2018-03-14T16:43:00.000Z
|
quiz/models.py
|
jzi040941/django_quiz
|
465d29c74e3ff6814f686296d225f18a50c99b9a
|
[
"MIT"
] | null | null | null |
quiz/models.py
|
jzi040941/django_quiz
|
465d29c74e3ff6814f686296d225f18a50c99b9a
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
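# Short-answer question tied to a teacher.Assignment: free-text question and answer.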
class quiz_short(models.Model):
AssignNum = models.ForeignKey('teacher.Assignment', on_delete=models.CASCADE)
Question = models.TextField()
Answer = models.TextField()
def __str__(self):
return "AssignNum : %s, question: %s Answer: %s" % (self.AssignNum, self.Question, self.Answer)
class quiz_one(models.Model):
AssignNum = models.ForeignKey('teacher.Assignment', on_delete=models.CASCADE)
Question = models.TextField()
# Check = models.CharField(max_length=7, choices=CHECK_LIST)
    Check = models.IntegerField(null=True, blank=True)
'''
Check_1 = models.BooleanField()
Check_2 = models.BooleanField()
Check_3 = models.BooleanField()
Check_4 = models.BooleanField()
'''
Selection_1 = models.TextField()
Selection_2 = models.TextField()
Selection_3 = models.TextField()
Selection_4 = models.TextField()
def __str__(self):
return "AssignNum : %s, question: %s Selection_1: %s" % (self.AssignNum, self.Question, self.Selection_1)
'''
class quiz_one(models.Model):
AssignNum = models.ForeignKey('teacher.Assignment', on_delete=models.CASCADE)
Question = models.TextField()
Answer = models.TextField()
Wrong_1 = models.TextField()
Wrong_2 = models.TextField()
Wrong_3 = models.TextField()
def __str__(self):
return "AssignNum : %s, question: %s Answer: %s" % (self.AssignNum, self.Question, self.Answer)
'''
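# Multiple-choice question: four selections, each with its own boolean Check_N flag.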
class quiz_multi(models.Model):
AssignNum = models.ForeignKey('teacher.Assignment', on_delete=models.CASCADE)
Question = models.TextField()
Check_1 = models.BooleanField()
Check_2 = models.BooleanField()
Check_3 = models.BooleanField()
Check_4 = models.BooleanField()
Selection_1 = models.TextField()
Selection_2 = models.TextField()
Selection_3 = models.TextField()
Selection_4 = models.TextField()
def __str__(self):
return "AssignNum : %s, question: %s Selection_1: %s" % (self.AssignNum, self.Question, self.Selection_1)
| 35.655172
| 113
| 0.696325
| 243
| 2,068
| 5.72428
| 0.193416
| 0.183321
| 0.099209
| 0.074766
| 0.86197
| 0.86197
| 0.86197
| 0.86197
| 0.86197
| 0.86197
| 0
| 0.014068
| 0.175048
| 2,068
| 57
| 114
| 36.280702
| 0.80129
| 0.041103
| 0
| 0.633333
| 0
| 0
| 0.128096
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.033333
| 0.1
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
0b5e2ce14cd1b7d0c4bdab1dbcbd6268fb51f4f1
| 165
|
py
|
Python
|
benchmark/VAR/GG/common.py
|
victor-estrade/SystGradDescent
|
822e7094290301ec47a99433381a8d6406798aff
|
[
"MIT"
] | 2
|
2019-03-20T09:05:02.000Z
|
2019-03-20T15:23:44.000Z
|
benchmark/VAR/GG/common.py
|
victor-estrade/SystGradDescent
|
822e7094290301ec47a99433381a8d6406798aff
|
[
"MIT"
] | null | null | null |
benchmark/VAR/GG/common.py
|
victor-estrade/SystGradDescent
|
822e7094290301ec47a99433381a8d6406798aff
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from __future__ import unicode_literals
| 27.5
| 39
| 0.866667
| 22
| 165
| 5.636364
| 0.545455
| 0.322581
| 0.516129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006849
| 0.115152
| 165
| 5
| 40
| 33
| 0.842466
| 0.078788
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0b77f76b149075d4d3817aa9211f7115e499a12a
| 273
|
py
|
Python
|
tests/parser/rewriting.projection.4.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/rewriting.projection.4.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/rewriting.projection.4.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
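# Projection-rewriting parser test: the program below is expected to be emitted
# unchanged (the input and output strings are identical).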
input = """
f(X,1) :- a(X,Y),
g(A,X),g(B,X),
not f(1,X).
a(X,Y) :- g(X,0),g(Y,0).
g(x1,0).
g(x2,0).
"""
output = """
f(X,1) :- a(X,Y),
g(A,X),g(B,X),
not f(1,X).
a(X,Y) :- g(X,0),g(Y,0).
g(x1,0).
g(x2,0).
"""
| 11.869565
| 25
| 0.296703
| 64
| 273
| 1.265625
| 0.203125
| 0.148148
| 0.148148
| 0.197531
| 0.864198
| 0.864198
| 0.864198
| 0.864198
| 0.864198
| 0.864198
| 0
| 0.089385
| 0.344322
| 273
| 22
| 26
| 12.409091
| 0.363128
| 0
| 0
| 0.875
| 0
| 0
| 0.878431
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
0b9113a200832679e9fc55536bc662bb2d860b4c
| 228
|
py
|
Python
|
satyrus/sat/types/string.py
|
lucasvg/Satyrus3-FinalProject-EspTopsOTM
|
024785752abdc46e3463d8c94df7c3da873c354d
|
[
"MIT"
] | null | null | null |
satyrus/sat/types/string.py
|
lucasvg/Satyrus3-FinalProject-EspTopsOTM
|
024785752abdc46e3463d8c94df7c3da873c354d
|
[
"MIT"
] | null | null | null |
satyrus/sat/types/string.py
|
lucasvg/Satyrus3-FinalProject-EspTopsOTM
|
024785752abdc46e3463d8c94df7c3da873c354d
|
[
"MIT"
] | null | null | null |
from .main import SatType
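# String mixes SatType with the built-in str: because str is immutable, the character
# data is set in __new__, while SatType's own initialisation happens in __init__.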
class String(SatType, str):
def __new__(cls, *args, **kwargs):
return str.__new__(cls, *args, **kwargs)
def __init__(self, *args, **kwargs):
SatType.__init__(self)
| 25.333333
| 49
| 0.605263
| 27
| 228
| 4.518519
| 0.555556
| 0.245902
| 0.163934
| 0.262295
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.254386
| 228
| 9
| 50
| 25.333333
| 0.717647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
0ba1dc47fec515daa7ce78ab6cbd344fd812af6f
| 113,179
|
py
|
Python
|
networking_vsphere/tests/unit/agent/test_ovsvapp_agent.py
|
Mirantis/vmware-dvs
|
37b874f9bf40b47d0de231c640367275fb3afb9b
|
[
"Apache-2.0"
] | 8
|
2015-04-23T15:36:56.000Z
|
2019-03-06T13:23:28.000Z
|
networking_vsphere/tests/unit/agent/test_ovsvapp_agent.py
|
Mirantis/vmware-dvs
|
37b874f9bf40b47d0de231c640367275fb3afb9b
|
[
"Apache-2.0"
] | 1
|
2016-10-04T13:24:50.000Z
|
2016-10-04T13:24:50.000Z
|
networking_vsphere/tests/unit/agent/test_ovsvapp_agent.py
|
Mirantis/vmware-dvs
|
37b874f9bf40b47d0de231c640367275fb3afb9b
|
[
"Apache-2.0"
] | 19
|
2015-09-15T13:25:01.000Z
|
2019-09-03T08:23:21.000Z
|
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import time
import logging
from oslo_config import cfg
from networking_vsphere.agent import ovsvapp_agent
from networking_vsphere.common import constants as ovsvapp_const
from networking_vsphere.common import error
from networking_vsphere.tests import base
from networking_vsphere.tests.unit.drivers import fake_manager
from networking_vsphere.utils import resource_util
from neutron.agent.common import ovs_lib
from neutron.common import utils as n_utils
from neutron.plugins.common import constants as p_const
from neutron.plugins.common import utils as p_utils
from neutron.plugins.ml2.drivers.openvswitch.agent import ovs_neutron_agent as ovs_agent # noqa
from neutron.plugins.ml2.drivers.openvswitch.agent import vlanmanager
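# Shared test fixtures: fake identifiers, security-group rule payloads and a fake device
# record used by the OVSvApp agent test cases below.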
NETWORK_ID = 'fake_net_id'
VNIC_ADDED = 'VNIC_ADDED'
FAKE_DEVICE_ID = 'fake_device_id'
FAKE_VM = 'fake_vm'
FAKE_HOST_1 = 'fake_host_1'
FAKE_HOST_2 = 'fake_host_2'
FAKE_CLUSTER_MOID = 'fake_cluster_moid'
FAKE_CLUSTER_1 = 'fake_cluster_1'
FAKE_CLUSTER_2 = 'fake_cluster_2'
FAKE_VCENTER = 'fake_vcenter'
FAKE_PORT_1 = 'fake_port_1'
FAKE_PORT_2 = 'fake_port_2'
FAKE_PORT_3 = 'fake_port_3'
FAKE_PORT_4 = 'fake_port_4'
MAC_ADDRESS = '01:02:03:04:05:06'
FAKE_CONTEXT = 'fake_context'
FAKE_SG = {'fake_sg': 'fake_sg_rule'}
FAKE_SG_RULE = {'security_group_source_groups': ['fake_rule_1',
'fake_rule_2',
'fake_rule_3'],
'security_group_rules': [
{'ethertype': 'IPv4',
'direction': 'egress',
'security_group_id': 'fake_id'
}],
'sg_provider_rules': [
{'ethertype': 'IPv4',
'direction': 'egress',
'source_port_range_min': 67,
'source_port_range_max': 67,
'port_range_min': 68,
'port_range_max': 68
}]
}
FAKE_SG_RULES = {FAKE_PORT_1: FAKE_SG_RULE}
FAKE_SG_RULES_MULTI_PORTS = {FAKE_PORT_1: FAKE_SG_RULE,
FAKE_PORT_2: FAKE_SG_RULE
}
FAKE_SG_RULES_MISSING = {FAKE_PORT_1: {'security_group_source_groups': [
'fake_rule_1',
'fake_rule_2',
'fake_rule_3'],
'sg_provider_rules': [],
'security_group_rules': [
{'ethertype': 'IPv4',
'direction': 'egress'
}]
}
}
FAKE_SG_RULES_PARTIAL = {FAKE_PORT_1: {'security_group_source_groups': [
'fake_rule_1',
'fake_rule_2',
'fake_rule_3'],
'sg_provider_rules': [],
'security_group_rules': [
{'ethertype': 'IPv4',
'direction': 'egress',
'port_range_min': 22,
'port_range_max': 22
}]
}
}
DEVICE = {'id': FAKE_DEVICE_ID,
'cluster_id': FAKE_CLUSTER_1,
'host': FAKE_HOST_1,
'vcenter': FAKE_VCENTER}
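# Minimal stand-ins for vCenter events, VMs and ports; only the attributes the agent
# actually reads are modelled.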
class SampleEvent(object):
def __init__(self, type, host, cluster, srcobj, host_changed=False):
self.event_type = type
self.host_name = host
self.cluster_id = cluster
self.src_obj = srcobj
self.host_changed = host_changed
class VM(object):
def __init__(self, uuid, vnics):
self.uuid = uuid
self.vnics = vnics
class SamplePort(object):
def __init__(self, port_uuid, mac_address=None, pg_id=None):
self.port_uuid = port_uuid
self.mac_address = mac_address
self.pg_id = pg_id
class SamplePortUIDMac(object):
def __init__(self, port_uuid, mac_address):
self.port_uuid = port_uuid
self.mac_address = mac_address
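# Restart detection: the agent is considered restarted when the integration bridge
# already exists and still carries flows from a previous run.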
class TestOVSvAppAgentRestart(base.TestCase):
@mock.patch('neutron.common.config.init')
@mock.patch('neutron.common.config.setup_logging')
@mock.patch('neutron.agent.ovsdb.api.'
'API.get')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.RpcPluginApi')
@mock.patch('neutron.agent.securitygroups_rpc.SecurityGroupServerRpcApi')
@mock.patch('neutron.agent.rpc.PluginReportStateAPI')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.OVSvAppPluginApi')
@mock.patch('neutron.context.get_admin_context_without_session')
@mock.patch('neutron.agent.rpc.create_consumers')
@mock.patch('neutron.plugins.ml2.drivers.openvswitch.agent.'
'ovs_neutron_agent.OVSNeutronAgent.setup_integration_br')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent.setup_ovs_bridges')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent.setup_security_br')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent._init_ovs_flows')
@mock.patch('networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.'
'check_ovs_firewall_restart')
@mock.patch('networking_vsphere.drivers.ovs_firewall.'
'OVSFirewallDriver.setup_base_flows')
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge.create')
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge.set_secure_mode')
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge.get_port_ofport')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.OVSvAppAgent.__init__')
def setUp(self, mock_ovs_init, mock_get_port_ofport,
mock_set_secure_mode, mock_create_ovs_bridge,
mock_setup_base_flows, mock_check_ovs_firewall_restart,
mock_init_ovs_flows, mock_setup_security_br,
mock_setup_ovs_bridges,
mock_setup_integration_br, mock_create_consumers,
mock_get_admin_context_without_session, mock_ovsvapp_pluginapi,
mock_plugin_report_stateapi, mock_securitygroup_server_rpcapi,
mock_rpc_pluginapi, mock_ovsdb_api, mock_setup_logging,
mock_init):
super(TestOVSvAppAgentRestart, self).setUp()
cfg.CONF.set_override('security_bridge_mapping',
"fake_sec_br:fake_if", 'SECURITYGROUP')
mock_get_port_ofport.return_value = 5
mock_ovs_init.return_value = None
self.agent = ovsvapp_agent.OVSvAppAgent()
self.agent.run_refresh_firewall_loop = False
self.LOG = ovsvapp_agent.LOG
self.agent.monitor_log = logging.getLogger('monitor')
def test_check_ovsvapp_agent_restart(self):
self.agent.int_br = mock.Mock()
with mock.patch.object(self.agent.int_br, 'bridge_exists',
return_value=True) as mock_br_exists, \
mock.patch.object(self.agent.int_br, 'dump_flows_for_table',
return_value='') as mock_dump_flows:
self.assertFalse(self.agent.check_ovsvapp_agent_restart())
self.assertTrue(mock_br_exists.called)
self.assertTrue(mock_dump_flows.called)
mock_dump_flows.return_value = 'cookie = 0x0'
self.assertTrue(self.agent.check_ovsvapp_agent_restart())
self.assertTrue(mock_br_exists.called)
self.assertTrue(mock_dump_flows.called)
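# Main OVSvApp agent test case: setUp patches out the RPC plumbing and OVS bridge
# creation so each test can exercise a single agent method in isolation.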
class TestOVSvAppAgent(base.TestCase):
@mock.patch('neutron.common.config.init')
@mock.patch('neutron.common.config.setup_logging')
@mock.patch('neutron.agent.ovsdb.api.'
'API.get')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.RpcPluginApi')
@mock.patch('neutron.agent.securitygroups_rpc.SecurityGroupServerRpcApi')
@mock.patch('neutron.agent.rpc.PluginReportStateAPI')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.OVSvAppPluginApi')
@mock.patch('neutron.context.get_admin_context_without_session')
@mock.patch('neutron.agent.rpc.create_consumers')
@mock.patch('neutron.plugins.ml2.drivers.openvswitch.agent.'
'ovs_neutron_agent.OVSNeutronAgent.setup_integration_br')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent.check_ovsvapp_agent_restart')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent.setup_ovs_bridges')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent.setup_security_br')
@mock.patch('networking_vsphere.agent.ovsvapp_agent.'
'OVSvAppAgent._init_ovs_flows')
@mock.patch('networking_vsphere.drivers.ovs_firewall.OVSFirewallDriver.'
'check_ovs_firewall_restart')
@mock.patch('networking_vsphere.drivers.ovs_firewall.'
'OVSFirewallDriver.setup_base_flows')
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge.create')
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge.set_secure_mode')
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge.get_port_ofport')
def setUp(self, mock_get_port_ofport,
mock_set_secure_mode, mock_create_ovs_bridge,
mock_setup_base_flows, mock_check_ovs_firewall_restart,
mock_init_ovs_flows, mock_setup_security_br,
mock_setup_ovs_bridges, mock_check_ovsvapp_agent_restart,
mock_setup_integration_br, mock_create_consumers,
mock_get_admin_context_without_session, mock_ovsvapp_pluginapi,
mock_plugin_report_stateapi, mock_securitygroup_server_rpcapi,
mock_rpc_pluginapi, mock_ovsdb_api, mock_setup_logging,
mock_init):
super(TestOVSvAppAgent, self).setUp()
cfg.CONF.set_override('security_bridge_mapping',
"fake_sec_br:fake_if", 'SECURITYGROUP')
mock_check_ovsvapp_agent_restart.return_value = False
mock_get_port_ofport.return_value = 5
self.agent = ovsvapp_agent.OVSvAppAgent()
self.agent.run_refresh_firewall_loop = False
self.LOG = ovsvapp_agent.LOG
self.agent.monitor_log = logging.getLogger('monitor')
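    # Helpers that build fake Neutron port payloads in the shape the agent expects.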
def _build_port(self, port):
port = {'admin_state_up': False,
'id': port,
'device': DEVICE,
'network_id': NETWORK_ID,
'physical_network': 'physnet1',
'segmentation_id': '1001',
'lvid': 1,
'network_type': 'vlan',
'fixed_ips': [{'subnet_id': 'subnet_uuid',
'ip_address': '1.1.1.1'}],
'device_owner': 'compute:None',
'security_groups': FAKE_SG,
'mac_address': MAC_ADDRESS,
'device_id': FAKE_DEVICE_ID
}
return port
def _build_update_port(self, port):
port = {'admin_state_up': False,
'id': port,
'network_id': NETWORK_ID,
'fixed_ips': [{'subnet_id': 'subnet_uuid',
'ip_address': '1.1.1.1'}],
'device_owner': 'compute:None',
'security_groups': FAKE_SG,
'mac_address': MAC_ADDRESS,
'device_id': FAKE_DEVICE_ID
}
return port
def test_setup_security_br_none(self):
cfg.CONF.set_override('security_bridge_mapping',
None, 'SECURITYGROUP')
self.agent.sec_br = mock.Mock()
with mock.patch.object(self.LOG, 'warning') as mock_logger_warn,\
mock.patch.object(self.agent.sec_br, 'bridge_exists'
) as mock_ovs_bridge:
self.assertRaises(SystemExit,
self.agent.setup_security_br)
self.assertTrue(mock_logger_warn.called)
self.assertFalse(mock_ovs_bridge.called)
def test_setup_security_br(self):
cfg.CONF.set_override('security_bridge_mapping',
"br-fake:fake_if", 'SECURITYGROUP')
self.agent.sec_br = mock.Mock()
self.agent.int_br = mock.Mock()
with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
mock.patch.object(ovs_lib, "OVSBridge") as mock_ovs_br, \
mock.patch.object(self.agent.sec_br,
"add_patch_port",
return_value=5), \
mock.patch.object(self.agent.int_br,
"add_patch_port",
return_value=6):
self.agent.setup_security_br()
self.assertTrue(mock_ovs_br.called)
self.assertTrue(self.agent.sec_br.add_patch_port.called)
self.assertTrue(mock_logger_info.called)
def test_recover_security_br_none(self):
cfg.CONF.set_override('security_bridge_mapping',
None, 'SECURITYGROUP')
self.agent.sec_br = mock.Mock()
with mock.patch.object(self.LOG, 'warning') as mock_logger_warn, \
mock.patch.object(self.agent.sec_br, 'bridge_exists'
) as mock_ovs_bridge:
self.assertRaises(SystemExit,
self.agent.recover_security_br)
self.assertTrue(mock_logger_warn.called)
self.assertFalse(mock_ovs_bridge.called)
@mock.patch('neutron.agent.common.ovs_lib.OVSBridge')
def test_recover_security_br(self, mock_ovs_bridge):
cfg.CONF.set_override('security_bridge_mapping',
"br-sec:physnet1", 'SECURITYGROUP')
self.agent.int_br = mock.Mock()
self.agent.sec_br = mock.Mock()
mock_br = mock_ovs_bridge.return_value
with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
mock.patch.object(mock_br, 'bridge_exists'), \
mock.patch.object(mock_br, 'add_patch_port') as mock_add_patch_port, \
mock.patch.object(self.agent.int_br,
"get_port_ofport",
return_value=6), \
mock.patch.object(mock_br,
"get_port_ofport",
return_value=6), \
mock.patch.object(mock_br,
"delete_port") as mock_delete_port:
mock_br.get_bridge_for_iface.return_value = 'br-sec'
self.agent.recover_security_br()
self.assertTrue(mock_logger_info.called)
self.assertFalse(mock_delete_port.called)
self.assertFalse(mock_add_patch_port.called)
mock_br.get_bridge_for_iface.return_value = 'br-fake'
self.agent.recover_security_br()
self.assertTrue(mock_logger_info.called)
self.assertTrue(mock_delete_port.called)
self.assertTrue(mock_add_patch_port.called)
@mock.patch('neutron.agent.ovsdb.api.'
'API.get')
def test_recover_physical_bridges(self, mock_ovsdb_api):
cfg.CONF.set_override('bridge_mappings',
["physnet1:br-eth1"], 'OVSVAPP')
self.agent.bridge_mappings = n_utils.parse_mappings(
cfg.CONF.OVSVAPP.bridge_mappings)
with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
mock.patch.object(self.LOG, 'error') as mock_logger_error, \
mock.patch.object(self.agent, "br_phys_cls") as mock_ovs_br, \
mock.patch.object(ovs_lib.BaseOVS,
"get_bridges",
return_value=['br-eth1']
), \
mock.patch.object(p_utils, 'get_interface_name'
) as mock_int_name, \
mock.patch.object(self.agent.int_br,
"get_port_ofport",
return_value=6) as mock_get_ofport:
self.agent.recover_physical_bridges(self.agent.bridge_mappings)
self.assertTrue(mock_logger_info.called)
self.assertFalse(mock_logger_error.called)
self.assertTrue(mock_ovs_br.called)
self.assertTrue(mock_get_ofport.called)
self.assertTrue(mock_int_name.called)
self.assertEqual(self.agent.int_ofports['physnet1'], 6)
def test_init_ovs_flows(self):
cfg.CONF.set_override('bridge_mappings',
["physnet1:br-eth1"], 'OVSVAPP')
self.agent.bridge_mappings = n_utils.parse_mappings(
cfg.CONF.OVSVAPP.bridge_mappings)
self.agent.patch_sec_ofport = 5
self.agent.int_ofports = {'physnet1': 'br-eth1'}
self.agent.phys_ofports = {"physnet1": "br-eth1"}
port = self._build_port(FAKE_PORT_1)
br = self._build_phys_brs(port)
self.agent.br = mock.Mock()
with mock.patch.object(self.agent.int_br,
"delete_flows"
) as mock_int_br_delete_flows, \
mock.patch.object(self.agent,
"br_phys_cls") as mock_ovs_br, \
mock.patch.object(self.agent.int_br,
"add_flow") as mock_int_br_add_flow:
self.agent._init_ovs_flows(self.agent.bridge_mappings)
self.assertTrue(mock_int_br_delete_flows.called)
self.assertTrue(mock_ovs_br.called)
self.assertTrue(br.delete_flows.called)
self.assertTrue(br.add_flows.called)
self.assertTrue(mock_int_br_add_flow.called)
def test_update_port_bindings(self):
self.agent.ports_to_bind.add("fake_port")
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_ports_binding",
return_value=set(["fake_port"])
) as mock_update_ports_binding, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception:
self.agent._update_port_bindings()
self.assertTrue(mock_update_ports_binding.called)
self.assertFalse(self.agent.ports_to_bind)
self.assertFalse(mock_log_exception.called)
def test_update_port_bindings_rpc_exception(self):
self.agent.ports_to_bind.add("fake_port")
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_ports_binding",
side_effect=Exception()
) as mock_update_port_binding, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception:
self.assertRaises(
error.OVSvAppNeutronAgentError,
self.agent._update_port_bindings)
self.assertTrue(mock_update_port_binding.called)
self.assertTrue(mock_log_exception.called)
self.assertEqual(set(['fake_port']),
self.agent.ports_to_bind)
def test_update_port_bindings_partial(self):
self.agent.ports_to_bind.add("fake_port1")
self.agent.ports_to_bind.add("fake_port2")
self.agent.ports_to_bind.add("fake_port3")
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_ports_binding",
return_value=set(["fake_port1",
"fake_port2"])
) as mock_update_port_binding, \
mock.patch.object(self.LOG, 'exception'):
self.agent._update_port_bindings()
self.assertTrue(mock_update_port_binding.called)
self.assertEqual(set(["fake_port3"]),
self.agent.ports_to_bind)
def test_setup_ovs_bridges_vlan(self):
cfg.CONF.set_override('tenant_network_types',
"vlan", 'OVSVAPP')
cfg.CONF.set_override('bridge_mappings',
["physnet1:br-eth1"], 'OVSVAPP')
with mock.patch.object(self.agent, 'setup_physical_bridges'
) as mock_phys_brs, \
mock.patch.object(self.agent, '_init_ovs_flows'
) as mock_init_ovs_flows:
self.agent.setup_ovs_bridges()
mock_phys_brs.assert_called_with(self.agent.bridge_mappings)
mock_init_ovs_flows.assert_called_with(self.agent.bridge_mappings)
@mock.patch('neutron.agent.ovsdb.api.'
'API.get')
def test_setup_ovs_bridges_vxlan(self, mock_ovsdb_api):
self.agent.local_ip = "10.10.10.10"
self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
with mock.patch.object(self.agent, 'setup_tunnel_br'
) as mock_setup_tunnel_br, \
mock.patch.object(self.agent, 'setup_tunnel_br_flows'
) as mock_setup_tunnel_br_flows:
self.agent.setup_ovs_bridges()
mock_setup_tunnel_br.assert_called_with("br-tun")
self.assertTrue(mock_setup_tunnel_br_flows.called)
def test_setup_ovs_bridges_vxlan_ofport(self):
cfg.CONF.set_override('tenant_network_types',
"vxlan", 'OVSVAPP')
cfg.CONF.set_override('local_ip',
"10.10.10.10", 'OVSVAPP')
cfg.CONF.set_override('tunnel_bridge',
"br-tun", 'OVSVAPP')
self.agent.tun_br = mock.Mock()
self.agent.int_br = mock.Mock()
self.agent.local_ip = "10.10.10.10"
self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
with mock.patch.object(self.agent.tun_br,
"add_patch_port",
return_value=5), \
mock.patch.object(self.agent.int_br,
"add_patch_port",
return_value=6), \
mock.patch.object(self.agent, 'setup_tunnel_br_flows'
) as mock_setup_tunnel_br_flows:
self.agent.setup_ovs_bridges()
self.assertTrue(self.agent.tun_br.add_patch_port.called)
self.assertEqual(self.agent.patch_tun_ofport, 6)
self.assertEqual(self.agent.patch_int_ofport, 5)
self.assertTrue(mock_setup_tunnel_br_flows.called)
def test_mitigate_ovs_restart_vlan(self):
self.agent.refresh_firewall_required = False
self.agent.devices_to_filter = set(['1111'])
self.agent.cluster_host_ports = set(['1111'])
self.agent.cluster_other_ports = set(['2222'])
with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
mock.patch.object(self.agent, "setup_integration_br"
) as mock_int_br, \
mock.patch.object(self.agent, "setup_physical_bridges"
) as mock_phys_brs, \
mock.patch.object(self.agent, "setup_security_br"
) as mock_sec_br, \
mock.patch.object(self.agent.sg_agent, "init_firewall"
) as mock_init_fw, \
mock.patch.object(self.agent, "setup_tunnel_br"
) as mock_setup_tunnel_br,\
mock.patch.object(self.agent, 'setup_tunnel_br_flows'
) as mock_setup_tunnel_br_flows, \
mock.patch.object(self.agent, "_init_ovs_flows"
) as mock_init_flows, \
mock.patch.object(self.agent.monitor_log, "warning"
) as monitor_warning, \
mock.patch.object(self.agent.monitor_log, "info"
) as monitor_info:
self.agent.mitigate_ovs_restart()
self.assertTrue(mock_int_br.called)
self.assertTrue(mock_phys_brs.called)
self.assertTrue(mock_sec_br.called)
self.assertFalse(mock_setup_tunnel_br.called)
self.assertFalse(mock_setup_tunnel_br_flows.called)
self.assertTrue(mock_init_fw.called)
self.assertTrue(mock_init_flows.called)
self.assertTrue(self.agent.refresh_firewall_required)
self.assertEqual(2, len(self.agent.devices_to_filter))
monitor_warning.assert_called_with("ovs: broken")
monitor_info.assert_called_with("ovs: ok")
self.assertTrue(mock_logger_info.called)
def test_mitigate_ovs_restart_vxlan(self):
self.agent.enable_tunneling = True
self.agent.refresh_firewall_required = False
self.agent.devices_to_filter = set(['1111'])
self.agent.cluster_host_ports = set(['1111'])
self.agent.cluster_other_ports = set(['2222'])
with mock.patch.object(self.LOG, 'info') as mock_logger_info, \
mock.patch.object(self.agent, "setup_integration_br"), \
mock.patch.object(self.agent, "setup_physical_bridges"
) as mock_phys_brs, \
mock.patch.object(self.agent, "setup_security_br"), \
mock.patch.object(self.agent.sg_agent, "init_firewall"
), \
mock.patch.object(self.agent, "setup_tunnel_br"
) as mock_setup_tunnel_br,\
mock.patch.object(self.agent, 'setup_tunnel_br_flows'
) as mock_setup_tunnel_br_flows, \
mock.patch.object(self.agent, "tunnel_sync"
) as mock_tun_sync, \
mock.patch.object(self.agent, "_init_ovs_flows"), \
mock.patch.object(self.agent.monitor_log, "warning"
) as monitor_warning, \
mock.patch.object(self.agent.monitor_log, "info"
) as monitor_info:
self.agent.mitigate_ovs_restart()
self.assertTrue(mock_setup_tunnel_br.called)
self.assertTrue(mock_setup_tunnel_br_flows.called)
self.assertFalse(mock_phys_brs.called)
self.assertTrue(mock_tun_sync.called)
self.assertTrue(self.agent.refresh_firewall_required)
self.assertEqual(len(self.agent.devices_to_filter), 2)
monitor_warning.assert_called_with("ovs: broken")
monitor_info.assert_called_with("ovs: ok")
self.assertTrue(mock_logger_info.called)
def test_mitigate_ovs_restart_exception(self):
self.agent.enable_tunneling = False
self.agent.refresh_firewall_required = False
self.agent.devices_to_filter = set()
self.agent.cluster_host_ports = set(['1111'])
self.agent.cluster_other_ports = set(['2222'])
with mock.patch.object(self.LOG, "info") as mock_logger_info, \
mock.patch.object(self.agent, "setup_integration_br",
side_effect=Exception()) as mock_int_br, \
mock.patch.object(self.agent, "setup_physical_bridges"
) as mock_phys_brs, \
mock.patch.object(self.agent, "setup_tunnel_br"
) as mock_setup_tunnel_br,\
mock.patch.object(self.agent, 'setup_tunnel_br_flows'
) as mock_setup_tunnel_br_flows, \
mock.patch.object(self.LOG, "exception"
) as mock_exception_log, \
mock.patch.object(self.agent.monitor_log, "warning"
) as monitor_warning, \
mock.patch.object(self.agent.monitor_log, "info"
) as monitor_info:
self.agent.mitigate_ovs_restart()
self.assertTrue(mock_int_br.called)
self.assertFalse(mock_phys_brs.called)
self.assertFalse(mock_setup_tunnel_br.called)
self.assertFalse(mock_setup_tunnel_br_flows.called)
self.assertFalse(mock_logger_info.called)
self.assertTrue(mock_exception_log.called)
self.assertFalse(self.agent.refresh_firewall_required)
self.assertEqual(0, len(self.agent.devices_to_filter))
monitor_warning.assert_called_with("ovs: broken")
self.assertFalse(monitor_info.called)
def _get_fake_port(self, port_id):
return {'id': port_id,
'port_id': port_id,
'mac_address': MAC_ADDRESS,
'fixed_ips': [{'subnet_id': 'subnet_uuid',
'ip_address': '1.1.1.1'}],
'security_groups': FAKE_SG,
'segmentation_id': 1232,
'lvid': 1,
'network_id': 'fake_network',
'device_id': FAKE_DEVICE_ID,
'admin_state_up': True,
'physical_network': 'physnet1',
'network_type': 'vlan'}
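    # Installs a mocked physical bridge for the port's physical_network in
    # agent.phys_brs and exercises its add_flows/delete_flows mocks.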
def _build_phys_brs(self, port):
phys_net = port['physical_network']
self.agent.phys_brs[phys_net] = {}
self.agent.phys_brs[phys_net]['eth_ofport'] = 5
br = self.agent.phys_brs[phys_net]['br'] = mock.Mock()
br.add_flows(port['segmentation_id'],
port['mac_address'],
5)
br.delete_flows(port['mac_address'],
port['segmentation_id'])
return br
def test_process_port(self):
fakeport = self._get_fake_port(FAKE_PORT_1)
self.agent.ports_dict = {}
self.agent.vlan_manager.mapping = {}
br = self._build_phys_brs(fakeport)
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.cluster_host_ports.add(FAKE_PORT_1)
self.agent.vnic_info[FAKE_PORT_1] = fakeport
with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
) as mock_add_devices, \
mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_prov_local_vlan:
status = self.agent._process_port(fakeport)
self.assertIn(FAKE_PORT_1, self.agent.ports_dict)
self.assertTrue(status)
mock_add_devices.assert_called_with([fakeport])
mock_prov_local_vlan.assert_called_with(fakeport)
self.assertTrue(br.add_flows.called)
self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
def test_process_port_existing_network(self):
fakeport = self._get_fake_port(FAKE_PORT_1)
self.agent.ports_dict = {}
self.agent.vlan_manager.mapping = {}
br = self._build_phys_brs(fakeport)
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.cluster_host_ports.add(FAKE_PORT_1)
self.agent.vnic_info[FAKE_PORT_1] = {}
self._build_lvm(fakeport)
with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
) as mock_add_devices, \
mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_prov_local_vlan:
status = self.agent._process_port(fakeport)
self.assertIn(FAKE_PORT_1, self.agent.ports_dict)
self.assertTrue(status)
mock_add_devices.assert_called_with([fakeport])
self.assertFalse(mock_prov_local_vlan.called)
self.assertTrue(br.add_flows.called)
def test_process_uncached_devices_with_few_devices(self):
devices = set(['123', '234', '345', '456', '567', '678',
'1123', '1234', '1345', '1456', '1567', '1678'])
with mock.patch('eventlet.GreenPool.spawn_n') as mock_spawn_thread, \
mock.patch.object(self.LOG, 'exception') as mock_log_exception:
self.agent._process_uncached_devices(devices)
self.assertTrue(mock_spawn_thread.called)
self.assertEqual(1, mock_spawn_thread.call_count)
self.assertFalse(mock_log_exception.called)
def test_process_uncached_devices_with_more_devices(self):
devices = set(['123', '234', '345', '456', '567', '678',
'1123', '1234', '1345', '1456', '1567', '1678',
'2123', '2234', '2345', '2456', '2567', '2678',
'3123', '3234', '3345', '3456', '3567', '3678',
'4123', '4234', '4345', '4456', '4567', '4678',
'5123', '5234', '5345', '5456', '5567', '5678',
'6123', '6234', '6345', '6456', '6567', '6678'])
with mock.patch('eventlet.GreenPool.spawn_n') as mock_spawn_thread, \
mock.patch.object(self.LOG, 'exception') as mock_log_exception:
self.agent._process_uncached_devices(devices)
self.assertTrue(mock_spawn_thread.called)
self.assertEqual(2, mock_spawn_thread.call_count)
self.assertFalse(mock_log_exception.called)
def test_process_uncached_devices_sublist_single_port_vlan(self):
fakeport_1 = self._get_fake_port(FAKE_PORT_1)
self.agent.ports_dict = {}
br = self._build_phys_brs(fakeport_1)
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.cluster_host_ports.add(FAKE_PORT_1)
self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
devices = [FAKE_PORT_1]
self.agent.vlan_manager.mapping = {}
with mock.patch.object(self.agent.ovsvapp_rpc,
'get_ports_details_list',
return_value=[fakeport_1]
) as mock_get_ports_details_list, \
mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
) as mock_add_devices_to_filter, \
mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
)as mock_refresh_firewall, \
mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_provision_local_vlan, \
mock.patch.object(self.LOG, 'exception') as mock_log_exception:
self.agent._process_uncached_devices_sublist(devices)
self.assertTrue(mock_get_ports_details_list.called)
self.assertEqual(1, mock_add_devices_to_filter.call_count)
self.assertTrue(mock_refresh_firewall.called)
self.assertTrue(mock_provision_local_vlan.called)
self.assertFalse(mock_log_exception.called)
self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
self.assertTrue(br.add_flows.called)
def test_process_uncached_devices_sublist_multiple_port_vlan(self):
fakeport_1 = self._get_fake_port(FAKE_PORT_1)
fakeport_2 = self._get_fake_port(FAKE_PORT_2)
self.agent.ports_dict = {}
self.agent.vlan_manager.mapping = {}
br = self._build_phys_brs(fakeport_1)
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.cluster_host_ports.add(FAKE_PORT_1)
self.agent.cluster_host_ports.add(FAKE_PORT_2)
self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
self.agent.vnic_info[FAKE_PORT_2] = fakeport_2
devices = [FAKE_PORT_1, FAKE_PORT_2]
with mock.patch.object(self.agent.ovsvapp_rpc,
'get_ports_details_list',
return_value=[fakeport_1, fakeport_2]
) as mock_get_ports_details_list, \
mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
) as mock_add_devices_to_filter, \
mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
)as mock_refresh_firewall, \
mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_prov_local_vlan, \
mock.patch.object(self.LOG, 'exception') as mock_log_exception:
self.agent._process_uncached_devices_sublist(devices)
self.assertTrue(mock_get_ports_details_list.called)
self.assertEqual(2, mock_add_devices_to_filter.call_count)
self.assertTrue(mock_refresh_firewall.called)
self.assertTrue(mock_prov_local_vlan.called)
self.assertFalse(mock_log_exception.called)
self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
self.assertNotIn(FAKE_PORT_2, self.agent.vnic_info)
self.assertTrue(br.add_flows.called)
def test_process_uncached_devices_sublist_single_port_vxlan(self):
fakeport_1 = self._get_fake_port(FAKE_PORT_1)
fakeport_1["network_type"] = p_const.TYPE_VXLAN
self.agent.ports_dict = {}
self.agent.vlan_manager.mapping = {}
self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
self.agent.cluster_host_ports.add(FAKE_PORT_1)
self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
devices = [FAKE_PORT_1]
with mock.patch.object(self.agent.ovsvapp_rpc,
'get_ports_details_list',
return_value=[fakeport_1]
) as mock_get_ports_details_list, \
mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
) as mock_add_devices_to_filter, \
mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
)as mock_refresh_firewall, \
mock.patch.object(self.agent, '_populate_lvm'), \
mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_prov_local_vlan, \
mock.patch.object(self.LOG, 'exception') as mock_log_exception:
self.agent._process_uncached_devices_sublist(devices)
self.assertTrue(mock_get_ports_details_list.called)
self.assertTrue(mock_prov_local_vlan.called)
self.assertEqual(1, mock_add_devices_to_filter.call_count)
self.assertTrue(mock_refresh_firewall.called)
self.assertFalse(mock_log_exception.called)
self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
def test_process_uncached_devices_sublist_multiple_port_vxlan(self):
fakeport_1 = self._get_fake_port(FAKE_PORT_1)
fakeport_2 = self._get_fake_port(FAKE_PORT_2)
fakeport_1["network_type"] = p_const.TYPE_VXLAN
fakeport_2["network_type"] = p_const.TYPE_VXLAN
self.agent.ports_dict = {}
self.agent.vlan_manager.mapping = {}
self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
self.agent.cluster_host_ports.add(FAKE_PORT_1)
self.agent.cluster_host_ports.add(FAKE_PORT_2)
self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
self.agent.vnic_info[FAKE_PORT_2] = fakeport_2
devices = [FAKE_PORT_1, FAKE_PORT_2]
with mock.patch.object(self.agent.ovsvapp_rpc,
'get_ports_details_list',
return_value=[fakeport_1, fakeport_2]
) as mock_get_ports_details_list, \
mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
) as mock_add_devices_to_filter, \
mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  ) as mock_refresh_firewall, \
mock.patch.object(self.agent, '_populate_lvm'), \
mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_prov_local_vlan, \
mock.patch.object(self.LOG, 'exception') as mock_log_exception:
self.agent._process_uncached_devices_sublist(devices)
self.assertTrue(mock_get_ports_details_list.called)
self.assertTrue(mock_prov_local_vlan.called)
self.assertEqual(2, mock_add_devices_to_filter.call_count)
self.assertTrue(mock_refresh_firewall.called)
self.assertFalse(mock_log_exception.called)
self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
self.assertNotIn(FAKE_PORT_2, self.agent.vnic_info)
def test_process_uncached_devices_sublist_stale_vm_port(self):
fakeport_1 = self._get_fake_port(FAKE_PORT_1)
fakeport_2 = self._get_fake_port(FAKE_PORT_2)
fakeport_3 = self._get_fake_port(FAKE_PORT_3)
self.agent.ports_dict = {}
self.agent.vlan_manager.mapping = {}
self._build_phys_brs(fakeport_1)
self._build_phys_brs(fakeport_2)
self._build_phys_brs(fakeport_3)
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.cluster_host_ports.add(FAKE_PORT_1)
self.agent.cluster_host_ports.add(FAKE_PORT_2)
self.agent.ports_to_bind = set([FAKE_PORT_3, FAKE_PORT_4])
self.agent.vnic_info[FAKE_PORT_1] = fakeport_1
self.agent.vnic_info[FAKE_PORT_2] = fakeport_2
self.agent.vnic_info[FAKE_PORT_3] = fakeport_3
devices = [FAKE_PORT_1, FAKE_PORT_2, FAKE_PORT_3]
self.agent.sg_agent.remove_devices_filter = mock.Mock()
with mock.patch.object(self.agent.ovsvapp_rpc,
'get_ports_details_list',
return_value=[fakeport_1, fakeport_2]
) as mock_get_ports_details_list, \
mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
) as mock_add_devices_to_filter, \
mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
                                  ) as mock_refresh_firewall, \
mock.patch.object(self.agent.sg_agent,
'remove_devices_filter'
                                  ) as mock_remove_device_filter, \
mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_prov_local_vlan, \
mock.patch.object(self.agent, '_remove_stale_ports_flows'), \
mock.patch.object(self.agent, '_block_stale_ports'), \
mock.patch.object(self.LOG, 'exception') as mock_log_exception:
self.agent._process_uncached_devices_sublist(devices)
self.assertTrue(mock_get_ports_details_list.called)
self.assertEqual(2, mock_add_devices_to_filter.call_count)
self.assertTrue(mock_refresh_firewall.called)
self.assertTrue(mock_prov_local_vlan.called)
self.assertFalse(mock_log_exception.called)
self.assertNotIn(FAKE_PORT_3, self.agent.ports_to_bind)
self.assertIn(FAKE_PORT_4, self.agent.ports_to_bind)
self.assertNotIn(FAKE_PORT_1, self.agent.vnic_info)
self.assertNotIn(FAKE_PORT_2, self.agent.vnic_info)
self.assertNotIn(FAKE_PORT_3, self.agent.vnic_info)
mock_remove_device_filter.assert_called_with(FAKE_PORT_3)
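    # _update_firewall(): deferred filtering of uncached devices and RPC failure handling.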
def test_update_firewall(self):
fakeport_1 = self._get_fake_port(FAKE_PORT_1)
fakeport_2 = self._get_fake_port(FAKE_PORT_2)
self._build_phys_brs(fakeport_1)
self._build_phys_brs(fakeport_2)
self.agent.devices_to_filter = set([FAKE_PORT_1,
FAKE_PORT_2])
self.agent.ports_dict = {FAKE_PORT_1: fakeport_1}
self.agent.vnic_info[FAKE_PORT_1] = {}
self.agent.vnic_info[FAKE_PORT_2] = {}
self.agent.refresh_firewall_required = True
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_1
with mock.patch.object(self.agent.ovsvapp_rpc,
'get_ports_details_list',
return_value=[fakeport_1, fakeport_2]
) as mock_get_ports_details_list, \
mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
) as mock_refresh_firewall, \
mock.patch.object(self.agent, '_provision_local_vlan'
), \
mock.patch.object(self.agent, '_remove_stale_ports_flows'), \
mock.patch.object(self.agent, '_block_stale_ports'), \
mock.patch.object(self.agent.monitor_log, "warning"
) as monitor_warning, \
mock.patch.object(self.agent.monitor_log, "info"
) as monitor_info:
self.agent._update_firewall()
self.assertFalse(self.agent.refresh_firewall_required)
self.assertFalse(self.agent.devices_to_filter)
self.assertIn(FAKE_PORT_2, self.agent.ports_dict)
mock_get_ports_details_list.assert_called_with(
self.agent.context,
[FAKE_PORT_2],
self.agent.agent_id,
self.agent.vcenter_id,
self.agent.cluster_id)
mock_refresh_firewall.assert_called_with(set([FAKE_PORT_1,
FAKE_PORT_2]))
self.assertEqual(2, monitor_warning.call_count)
self.assertEqual(2, monitor_info.call_count)
def test_update_firewall_get_ports_exception(self):
fakeport_1 = self._get_fake_port(FAKE_PORT_1)
self.agent.devices_to_filter = set([FAKE_PORT_1,
FAKE_PORT_2])
self.agent.ports_dict = {FAKE_PORT_1: fakeport_1}
self.agent.refresh_firewall_required = True
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_1
with mock.patch.object(self.agent.ovsvapp_rpc,
'get_ports_details_list',
side_effect=Exception()
) as mock_get_ports_details_list, \
mock.patch.object(self.agent.sg_agent, 'refresh_firewall'
) as mock_refresh_firewall, \
mock.patch.object(self.agent.monitor_log, "warning"
) as monitor_warning, \
mock.patch.object(self.agent.monitor_log, "info"
) as monitor_info:
self.agent._update_firewall()
self.assertTrue(self.agent.refresh_firewall_required)
self.assertEqual(set([FAKE_PORT_2]), self.agent.devices_to_filter)
self.assertNotIn(FAKE_PORT_2, self.agent.ports_dict)
mock_get_ports_details_list.assert_called_with(
self.agent.context,
[FAKE_PORT_2],
self.agent.agent_id,
self.agent.vcenter_id,
self.agent.cluster_id)
mock_refresh_firewall.assert_called_with(set([FAKE_PORT_1]))
self.assertEqual(2, monitor_warning.call_count)
self.assertEqual(1, monitor_info.call_count)
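    # _check_for_updates(): OVS status checks, firewall refresh and port-binding triggers.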
def test_check_for_updates_no_updates(self):
self.agent.refresh_firewall_required = False
self.agent.ports_to_bind = None
with mock.patch.object(self.agent, 'check_ovs_status',
return_value=4) as mock_check_ovs, \
mock.patch.object(self.agent, '_update_firewall'
) as mock_update_firewall, \
mock.patch.object(self.agent.sg_agent,
'firewall_refresh_needed',
return_value=False
) as mock_firewall_refresh, \
mock.patch.object(self.agent.sg_agent, 'refresh_port_filters'
) as mock_refresh_port_filters, \
mock.patch.object(self.agent, '_update_port_bindings'
) as mock_update_port_bindings:
self.agent._check_for_updates()
self.assertTrue(mock_check_ovs.called)
self.assertFalse(mock_update_firewall.called)
self.assertTrue(mock_firewall_refresh.called)
self.assertFalse(mock_refresh_port_filters.called)
self.assertFalse(mock_update_port_bindings.called)
def test_check_for_updates_ovs_restarted(self):
self.agent.refresh_firewall_required = False
self.agent.ports_to_bind = None
with mock.patch.object(self.agent, 'check_ovs_status',
return_value=0) as mock_check_ovs, \
mock.patch.object(self.agent, 'mitigate_ovs_restart'
) as mock_mitigate, \
mock.patch.object(self.agent, '_update_firewall'
) as mock_update_firewall, \
mock.patch.object(self.agent.sg_agent,
'firewall_refresh_needed',
return_value=False
) as mock_firewall_refresh, \
mock.patch.object(self.agent, '_update_port_bindings'
) as mock_update_port_bindings:
self.agent._check_for_updates()
self.assertTrue(mock_check_ovs.called)
self.assertTrue(mock_mitigate.called)
self.assertFalse(mock_update_firewall.called)
self.assertTrue(mock_firewall_refresh.called)
self.assertFalse(mock_update_port_bindings.called)
@mock.patch.object(ovsvapp_agent.OVSvAppAgent, 'check_ovs_status')
def test_check_for_updates_ovs_dead(self, check_ovs_status):
check_ovs_status.return_value = 2
self.agent.refresh_firewall_required = False
self.agent.ports_to_bind = None
with mock.patch.object(self.agent, 'mitigate_ovs_restart'
) as mock_mitigate, \
mock.patch.object(self.agent, '_update_firewall'
) as mock_update_firewall, \
mock.patch.object(self.agent.sg_agent,
'firewall_refresh_needed',
return_value=False
) as mock_firewall_refresh, \
mock.patch.object(self.agent, '_update_port_bindings'
) as mock_update_port_bindings:
self.agent._check_for_updates()
self.assertTrue(self.agent.ovsvapp_mitigation_required)
self.assertTrue(check_ovs_status.called)
self.assertFalse(mock_mitigate.called)
self.assertTrue(mock_firewall_refresh.called)
self.assertFalse(mock_update_port_bindings.called)
check_ovs_status.return_value = 1
self.agent._check_for_updates()
self.assertTrue(check_ovs_status.called)
self.assertTrue(mock_mitigate.called)
self.assertFalse(mock_update_firewall.called)
self.assertTrue(mock_firewall_refresh.called)
self.assertFalse(mock_update_port_bindings.called)
self.assertFalse(self.agent.ovsvapp_mitigation_required)
def test_check_for_updates_devices_to_filter(self):
self.agent.refresh_firewall_required = True
self.agent.ports_to_bind = None
with mock.patch.object(self.agent, 'check_ovs_status',
return_value=4) as mock_check_ovs, \
mock.patch.object(self.agent, 'mitigate_ovs_restart'
) as mock_mitigate, \
mock.patch.object(self.agent, '_update_firewall'
) as mock_update_firewall,\
mock.patch.object(self.agent.sg_agent,
'firewall_refresh_needed',
return_value=False
) as mock_firewall_refresh, \
mock.patch.object(self.agent, '_update_port_bindings'
) as mock_update_port_bindings:
self.agent._check_for_updates()
self.assertTrue(mock_check_ovs.called)
self.assertFalse(mock_mitigate.called)
self.assertTrue(mock_update_firewall.called)
self.assertTrue(mock_firewall_refresh.called)
self.assertFalse(mock_update_port_bindings.called)
def test_check_for_updates_firewall_refresh(self):
self.agent.refresh_firewall_required = False
self.agent.ports_to_bind = None
with mock.patch.object(self.agent, 'check_ovs_status',
return_value=4) as mock_check_ovs, \
mock.patch.object(self.agent, '_update_firewall'
) as mock_update_firewall, \
mock.patch.object(self.agent.sg_agent,
'firewall_refresh_needed',
return_value=True
) as mock_firewall_refresh,\
mock.patch.object(self.agent.sg_agent, 'refresh_port_filters'
) as mock_refresh_port_filters, \
mock.patch.object(self.agent, '_update_port_bindings'
) as mock_update_port_bindings:
self.agent._check_for_updates()
self.assertTrue(mock_check_ovs.called)
self.assertFalse(mock_update_firewall.called)
self.assertTrue(mock_firewall_refresh.called)
self.assertTrue(mock_refresh_port_filters.called)
self.assertFalse(mock_update_port_bindings.called)
def test_check_for_updates_port_bindings(self):
self.agent.refresh_firewall_required = False
self.agent.ports_to_bind.add("fake_port")
with mock.patch.object(self.agent, 'check_ovs_status',
return_value=4) as mock_check_ovs, \
mock.patch.object(self.agent, '_update_firewall'
) as mock_update_firewall, \
mock.patch.object(self.agent.sg_agent,
'firewall_refresh_needed',
return_value=False
) as mock_firewall_refresh, \
mock.patch.object(self.agent, '_update_port_bindings'
) as mock_update_port_bindings:
self.agent._check_for_updates()
self.assertTrue(mock_check_ovs.called)
self.assertFalse(mock_update_firewall.called)
self.assertTrue(mock_firewall_refresh.called)
self.assertTrue(mock_update_port_bindings.called)
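    # _update_devices_up() / _update_devices_down(): batched device-state RPCs, including partial failures.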
def test_update_devices_up(self):
self.agent.devices_up_list.append(FAKE_PORT_1)
ret_value = {'devices_up': [FAKE_PORT_1],
'failed_devices_up': []}
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_up",
return_value=ret_value
) as update_devices_up, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_up()
self.assertTrue(update_devices_up.called)
self.assertFalse(self.agent.devices_up_list)
self.assertFalse(log_exception.called)
def test_update_devices_up_rpc_exception(self):
self.agent.devices_up_list.append(FAKE_PORT_1)
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_up",
side_effect=Exception()
) as update_devices_up, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_up()
self.assertTrue(update_devices_up.called)
self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
self.assertTrue(log_exception.called)
def test_update_devices_up_partial(self):
self.agent.devices_up_list = [FAKE_PORT_1, FAKE_PORT_2, FAKE_PORT_3]
ret_value = {'devices_up': [FAKE_PORT_1, FAKE_PORT_2],
'failed_devices_up': [FAKE_PORT_3]}
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_up",
return_value=ret_value
) as update_devices_up, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_up()
self.assertTrue(update_devices_up.called)
self.assertEqual([FAKE_PORT_3], self.agent.devices_up_list)
self.assertFalse(log_exception.called)
def test_update_devices_down(self):
self.agent.devices_down_list.append(FAKE_PORT_1)
ret_value = {'devices_down': [FAKE_PORT_1],
'failed_devices_down': []}
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_down",
return_value=ret_value
) as update_devices_down, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_down()
self.assertTrue(update_devices_down.called)
self.assertFalse(self.agent.devices_down_list)
self.assertFalse(log_exception.called)
def test_update_devices_down_rpc_exception(self):
self.agent.devices_down_list.append(FAKE_PORT_1)
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_down",
side_effect=Exception()
) as update_devices_down, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_down()
self.assertTrue(update_devices_down.called)
self.assertEqual([FAKE_PORT_1], self.agent.devices_down_list)
self.assertTrue(log_exception.called)
def test_update_devices_down_partial(self):
self.agent.devices_down_list = [FAKE_PORT_1, FAKE_PORT_2, FAKE_PORT_3]
ret_value = {'devices_down': [FAKE_PORT_1, FAKE_PORT_2],
'failed_devices_down': [FAKE_PORT_3]}
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_devices_down",
return_value=ret_value
) as update_devices_down, \
mock.patch.object(self.LOG, 'exception'
) as log_exception:
self.agent._update_devices_down()
self.assertTrue(update_devices_down.called)
self.assertEqual([FAKE_PORT_3], self.agent.devices_down_list)
self.assertFalse(log_exception.called)
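    # _report_state(): agent state reporting via the state RPC, including failure logging.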
def test_report_state(self):
with mock.patch.object(self.agent.state_rpc,
"report_state") as report_st:
self.agent._report_state()
report_st.assert_called_with(self.agent.context,
self.agent.agent_state,
True)
self.assertNotIn("start_flag", self.agent.agent_state)
self.assertFalse(self.agent.use_call)
self.assertEqual(cfg.CONF.host,
self.agent.agent_state["host"])
def test_report_state_fail(self):
with mock.patch.object(self.agent.state_rpc,
"report_state",
side_effect=Exception()) as mock_report_st, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception:
self.agent._report_state()
mock_report_st.assert_called_with(self.agent.context,
self.agent.agent_state,
True)
self.assertTrue(mock_log_exception.called)
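    # process_event(): handling of vCenter VM created/updated/deleted events.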
def test_process_event_ignore_event(self):
vm = VM(FAKE_VM, [])
event = SampleEvent(VNIC_ADDED, FAKE_HOST_1,
FAKE_CLUSTER_MOID, vm)
with mock.patch.object(self.agent,
"_notify_device_added") as mock_add_vm, \
mock.patch.object(self.agent,
"_notify_device_updated") as mock_update_vm, \
mock.patch.object(self.agent,
"_notify_device_deleted") as mock_del_vm, \
mock.patch.object(self.LOG, 'debug') as mock_log_debug:
self.agent.process_event(event)
self.assertFalse(mock_add_vm.called)
self.assertFalse(mock_update_vm.called)
self.assertFalse(mock_del_vm.called)
self.assertTrue(mock_log_debug.called)
def test_process_event_exception(self):
vm = VM(FAKE_VM, [])
event = SampleEvent(ovsvapp_const.VM_CREATED,
FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
with mock.patch.object(self.agent,
"_notify_device_added",
side_effect=Exception()) as mock_add_vm, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception, \
mock.patch.object(self.LOG, 'error') as mock_log_error:
self.agent.process_event(event)
self.assertTrue(mock_add_vm.called)
self.assertTrue(mock_log_error.called)
self.assertTrue(mock_log_exception.called)
def test_process_event_vm_create_nonics_non_host_non_cluster(self):
self.agent.esx_hostname = FAKE_HOST_2
vm = VM(FAKE_VM, [])
event = SampleEvent(ovsvapp_const.VM_CREATED,
FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
self.agent.state = ovsvapp_const.AGENT_RUNNING
with mock.patch.object(self.agent,
"_notify_device_added") as device_added:
self.agent.process_event(event)
self.assertTrue(device_added.called)
def test_process_event_vm_create_nonics_non_host(self):
self.agent.esx_hostname = FAKE_HOST_2
vm = VM(FAKE_VM, [])
event = SampleEvent(ovsvapp_const.VM_CREATED,
FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
self.agent.state = ovsvapp_const.AGENT_RUNNING
with mock.patch.object(self.agent,
"_notify_device_added") as device_added:
self.agent.process_event(event)
self.assertTrue(device_added.called)
self.assertEqual(FAKE_CLUSTER_MOID, self.agent.cluster_moid)
def test_process_event_vm_create_nics_non_host(self):
self.agent.esx_hostname = FAKE_HOST_2
vm_port1 = SamplePort(FAKE_PORT_1)
vm_port2 = SamplePort(FAKE_PORT_2)
vm = VM(FAKE_VM, ([vm_port1, vm_port2]))
event = SampleEvent(ovsvapp_const.VM_CREATED,
FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.sec_br = mock.Mock()
with mock.patch.object(self.agent.sec_br, 'dump_flows_for',
return_value='mock_flow') as mock_dump_flows:
self.agent.process_event(event)
self.assertTrue(mock_dump_flows.called)
for vnic in vm.vnics:
self.assertIn(vnic.port_uuid, self.agent.devices_to_filter)
self.assertIn(vnic.port_uuid, self.agent.cluster_other_ports)
self.assertNotIn(vnic.port_uuid, self.agent.cluster_host_ports)
def test_process_event_vm_create_nics_host(self):
self.agent.esx_hostname = FAKE_HOST_1
vm_port1 = SamplePort(FAKE_PORT_1)
vm_port2 = SamplePort(FAKE_PORT_2)
vm = VM(FAKE_VM, ([vm_port1, vm_port2]))
event = SampleEvent(ovsvapp_const.VM_CREATED,
FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.sec_br = mock.Mock()
with mock.patch.object(self.agent.sec_br, 'dump_flows_for',
return_value='mock_flow') as mock_dump_flows:
self.agent.process_event(event)
self.assertTrue(mock_dump_flows.called)
for vnic in vm.vnics:
self.assertIn(vnic.port_uuid, self.agent.devices_to_filter)
self.assertIn(vnic.port_uuid, self.agent.cluster_host_ports)
self.assertNotIn(vnic.port_uuid, self.agent.cluster_other_ports)
with mock.patch.object(self.agent.sec_br, 'dump_flows_for',
return_value='') as mock_dump_flows, \
mock.patch.object(self.agent.ovsvapp_rpc,
"get_ports_for_device",
return_value=True) as mock_get_ports:
self.agent.process_event(event)
self.assertTrue(mock_dump_flows.called)
self.assertTrue(mock_get_ports.called)
def test_process_event_vm_updated_nonhost(self):
self.agent.esx_hostname = FAKE_HOST_2
vm_port1 = SamplePort(FAKE_PORT_1)
port = self._build_port(FAKE_PORT_1)
self.agent.ports_dict[port['id']] = self.agent._build_port_info(
port)
vm = VM(FAKE_VM, [vm_port1])
event = SampleEvent(ovsvapp_const.VM_UPDATED,
FAKE_HOST_1, FAKE_CLUSTER_MOID, vm, True)
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.process_event(event)
self.assertIn(FAKE_PORT_1, self.agent.cluster_other_ports)
def test_process_event_vm_delete_hosted_vm_vlan(self):
self.agent.esx_hostname = FAKE_HOST_1
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.cluster_host_ports.add(FAKE_PORT_1)
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
port = self._build_port(FAKE_PORT_1)
br = self._build_phys_brs(port)
self.agent.ports_dict[port['id']] = self.agent._build_port_info(
port)
vm_port = SamplePortUIDMac(FAKE_PORT_1, MAC_ADDRESS)
vm = VM(FAKE_VM, ([vm_port]))
event = SampleEvent(ovsvapp_const.VM_DELETED,
FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self._build_lvm(port)
self.agent.net_mgr.initialize_driver()
with mock.patch.object(self.agent.net_mgr.get_driver(),
"post_delete_vm",
) as mock_post_del_vm, \
mock.patch.object(self.LOG, 'debug'), \
mock.patch.object(self.agent.net_mgr.get_driver(),
"delete_network") as mock_del_net:
self.agent.process_event(event)
for vnic in vm.vnics:
self.assertNotIn(vnic.port_uuid, self.agent.cluster_host_ports)
self.assertTrue(mock_post_del_vm.called)
self.assertFalse(mock_del_net.called)
self.assertTrue(br.delete_flows.called)
def test_process_event_vm_delete_hosted_vm_vxlan(self):
self.agent.esx_hostname = FAKE_HOST_1
self.agent.cluster_host_ports.add(FAKE_PORT_1)
self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
port = self._build_port(FAKE_PORT_1)
port['network_type'] = p_const.TYPE_VXLAN
self.agent.ports_dict[port['id']] = self.agent._build_port_info(
port)
vm_port = SamplePortUIDMac(FAKE_PORT_1, MAC_ADDRESS)
vm = VM(FAKE_VM, ([vm_port]))
event = SampleEvent(ovsvapp_const.VM_DELETED,
FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
with mock.patch.object(self.agent.net_mgr.get_driver(),
"post_delete_vm",
return_value=True) as (post_del_vm):
self.agent.process_event(event)
for vnic in vm.vnics:
self.assertNotIn(vnic.port_uuid, self.agent.cluster_host_ports)
self.assertTrue(post_del_vm.called)
def test_process_event_vm_delete_non_hosted_vm(self):
self.agent.esx_hostname = FAKE_HOST_2
self.agent.cluster_other_ports.add(FAKE_PORT_1)
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
port = self._build_port(FAKE_PORT_1)
self.agent.ports_dict[port['id']] = self.agent._build_port_info(
port)
vm_port = SamplePortUIDMac(FAKE_PORT_1, MAC_ADDRESS)
vm = VM(FAKE_VM, ([vm_port]))
event = SampleEvent(ovsvapp_const.VM_DELETED,
FAKE_HOST_1, FAKE_CLUSTER_MOID, vm)
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
self.agent.state = ovsvapp_const.AGENT_RUNNING
with mock.patch.object(self.agent.net_mgr.get_driver(),
"post_delete_vm",
return_value=True) as mock_post_del_vm, \
mock.patch.object(self.agent.net_mgr.get_driver(),
"delete_network") as mock_del_net:
self.agent.process_event(event)
for vnic in vm.vnics:
self.assertNotIn(vnic.port_uuid,
self.agent.cluster_other_ports)
self.assertTrue(mock_post_del_vm.called)
self.assertFalse(mock_del_net.called)
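    # _notify_device_added() / _notify_device_updated(): device notifications and bindings for VLAN and VXLAN.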
def test_notify_device_added_with_hosted_vm(self):
vm = VM(FAKE_VM, [])
host = FAKE_HOST_1
self.agent.esx_hostname = host
self.agent.state = ovsvapp_const.AGENT_RUNNING
with mock.patch.object(self.agent.ovsvapp_rpc,
"get_ports_for_device",
return_value=True) as mock_get_ports, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception, \
mock.patch.object(time, "sleep") as mock_time_sleep:
self.agent._notify_device_added(vm, host)
self.assertTrue(mock_get_ports.called)
self.assertFalse(mock_time_sleep.called)
self.assertFalse(mock_log_exception.called)
def test_notify_device_added_rpc_exception(self):
vm = VM(FAKE_VM, [])
host = FAKE_HOST_1
self.agent.esx_hostname = host
self.agent.state = ovsvapp_const.AGENT_RUNNING
with mock.patch.object(self.agent.ovsvapp_rpc,
"get_ports_for_device",
side_effect=Exception()) as mock_get_ports, \
mock.patch.object(self.LOG, 'exception'
                                  ) as mock_log_exception, \
mock.patch.object(time, "sleep") as mock_time_sleep:
self.assertRaises(
error.OVSvAppNeutronAgentError,
self.agent._notify_device_added, vm, host)
self.assertTrue(mock_log_exception.called)
self.assertTrue(mock_get_ports.called)
self.assertFalse(mock_time_sleep.called)
def test_notify_device_added_with_retry(self):
vm = VM(FAKE_VM, [])
host = FAKE_HOST_1
self.agent.esx_hostname = host
self.agent.state = ovsvapp_const.AGENT_RUNNING
with mock.patch.object(self.agent.ovsvapp_rpc,
"get_ports_for_device",
return_value=False) as mock_get_ports, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception, \
mock.patch.object(time, "sleep") as mock_time_sleep:
self.agent._notify_device_added(vm, host)
self.assertTrue(mock_get_ports.called)
self.assertTrue(mock_time_sleep.called)
self.assertFalse(mock_log_exception.called)
def test_notify_device_updated_migration_vlan(self):
host = FAKE_HOST_1
self.agent.esx_hostname = host
vm_port1 = SamplePort(FAKE_PORT_1)
vm = VM(FAKE_VM, [vm_port1])
port = self._build_port(FAKE_PORT_1)
self._build_phys_brs(port)
self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
self._build_lvm(port)
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent._add_ports_to_host_ports([FAKE_PORT_1])
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_device_binding"
) as mock_update_device_binding, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception:
self.agent._notify_device_updated(vm, FAKE_HOST_2, True)
self.assertNotIn(FAKE_PORT_1, self.agent.cluster_host_ports)
self.assertFalse(mock_update_device_binding.called)
self.assertFalse(mock_log_exception.called)
def test_notify_device_update_not_found(self):
host = FAKE_HOST_1
self.agent.esx_hostname = host
vm_port1 = SamplePort(FAKE_PORT_1)
vm = VM(FAKE_VM, [vm_port1])
port = self._build_port(FAKE_PORT_1)
self._build_phys_brs(port)
self._build_lvm(port)
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
br = self.agent.phys_brs[port['physical_network']]['br']
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_device_binding"
):
self.agent._notify_device_updated(vm, host, True)
self.assertFalse(br.add_drop_flows.called)
self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_device_binding"
):
self.agent._notify_device_updated(vm, host, True)
self.assertTrue(br.add_drop_flows.called)
def test_notify_device_updated_host_vlan(self):
host = FAKE_HOST_1
self.agent.esx_hostname = host
vm_port1 = SamplePort(FAKE_PORT_1)
vm = VM(FAKE_VM, [vm_port1])
port = self._build_port(FAKE_PORT_1)
self._build_phys_brs(port)
self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
self._build_lvm(port)
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
br = self.agent.phys_brs[port['physical_network']]['br']
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_device_binding"
) as mock_update_device_binding:
self.agent._notify_device_updated(vm, host, True)
self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
self.assertTrue(mock_update_device_binding.called)
self.assertTrue(br.add_flows.called)
def test_notify_device_updated_vlan_rpc_exception(self):
host = FAKE_HOST_1
self.agent.esx_hostname = host
vm_port1 = SamplePort(FAKE_PORT_1)
vm = VM(FAKE_VM, [vm_port1])
port = self._build_port(FAKE_PORT_1)
br = self._build_phys_brs(port)
self.agent.ports_dict[port['id']] = self.agent._build_port_info(port)
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_device_binding",
side_effect=Exception()
) as mock_update_device_binding, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception:
self.assertRaises(
error.OVSvAppNeutronAgentError,
self.agent._notify_device_updated, vm, host, True)
self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
self.assertTrue(br.add_flows.called)
self.assertTrue(mock_update_device_binding.called)
self.assertTrue(mock_log_exception.called)
def test_notify_device_updated_host_vlan_multiple_nic(self):
host = FAKE_HOST_1
self.agent.esx_hostname = host
vm_port1 = SamplePort(FAKE_PORT_1)
vm_port2 = SamplePort(FAKE_PORT_2)
vm = VM(FAKE_VM, ([vm_port1, vm_port2]))
port1 = self._build_port(FAKE_PORT_1)
port2 = self._build_port(FAKE_PORT_2)
br1 = self._build_phys_brs(port1)
br2 = self._build_phys_brs(port2)
self.agent.ports_dict[port1['id']] = self.agent._build_port_info(port1)
self.agent.ports_dict[port2['id']] = self.agent._build_port_info(port2)
self._build_lvm(port1)
self._build_lvm(port2)
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_device_binding"
) as mock_update_device_binding, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception:
self.agent._notify_device_updated(vm, host, True)
self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
self.assertTrue(mock_update_device_binding.called)
self.assertFalse(mock_log_exception.called)
self.assertEqual(1, mock_update_device_binding.call_count)
self.assertTrue(br1.add_flows.called)
self.assertTrue(br2.add_flows.called)
def _build_lvm(self, port):
try:
self.agent.vlan_manager.add(port['network_id'], port['lvid'],
port['network_type'],
port['physical_network'], '1234')
except vlanmanager.MappingAlreadyExists:
return None
def test_notify_device_updated_host_vxlan(self):
host = FAKE_HOST_1
self.agent.esx_hostname = host
vm_port1 = SamplePort(FAKE_PORT_1)
port1 = self._build_port(FAKE_PORT_1)
port1['network_type'] = p_const.TYPE_VXLAN
self.agent.ports_dict[port1['id']] = self.agent._build_port_info(port1)
vm = VM(FAKE_VM, [vm_port1])
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_device_binding"
) as mock_update_device_binding, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception:
self.agent._notify_device_updated(vm, host, True)
self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
self.assertTrue(mock_update_device_binding.called)
self.assertFalse(mock_log_exception.called)
def test_notify_device_updated_vxlan_rpc_exception(self):
host = FAKE_HOST_1
self.agent.esx_hostname = host
vm_port1 = SamplePort(FAKE_PORT_1)
vm = VM(FAKE_VM, [vm_port1])
self.agent.state = ovsvapp_const.AGENT_RUNNING
self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
with mock.patch.object(self.agent.ovsvapp_rpc,
"update_device_binding",
side_effect=Exception()
) as mock_update_device_binding, \
mock.patch.object(self.LOG, 'exception'
) as mock_log_exception:
self.assertRaises(
error.OVSvAppNeutronAgentError,
self.agent._notify_device_updated, vm, host, True)
self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
self.assertTrue(mock_update_device_binding.called)
self.assertTrue(mock_log_exception.called)
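    # _map_port_to_common_model(): translating Neutron ports to the common network/port model.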
def test_map_port_to_common_model_vlan(self):
expected_port = self._build_port(FAKE_PORT_1)
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
network, port = self.agent._map_port_to_common_model(expected_port)
expected_name = expected_port['network_id'] + "-" + FAKE_CLUSTER_MOID
self.assertEqual(expected_name, network.name)
self.assertEqual(expected_port['id'], port.uuid)
def test_map_port_to_common_model_vxlan(self):
expected_port = self._build_port(FAKE_PORT_1)
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
network, port = self.agent._map_port_to_common_model(expected_port, 1)
expected_name = expected_port['network_id'] + "-" + FAKE_CLUSTER_MOID
self.assertEqual(expected_name, network.name)
self.assertEqual(expected_port['id'], port.uuid)
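    # device_create(): port provisioning for hosted and non-hosted VMs across VLAN and VXLAN.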
def test_device_create_cluster_mismatch(self):
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_2
with mock.patch.object(self.agent,
'_process_create_ports',
return_value=True) as mock_create_ports, \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.device_create(FAKE_CONTEXT,
device=DEVICE)
self.assertTrue(mock_logger_debug.called)
self.assertFalse(mock_create_ports.called)
def test_device_create_non_hosted_vm(self):
ports = [self._build_port(FAKE_PORT_1)]
self._build_phys_brs(ports[0])
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.esx_hostname = FAKE_HOST_2
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.devices_up_list = []
self.agent.vlan_manager.mapping = {}
with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
) as mock_add_devices_fn, \
mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
) as mock_sg_update_fn, \
mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
return_value=FAKE_SG_RULES
) as mock_expand_sg_rules, \
mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_prov_local_vlan, \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.device_create(FAKE_CONTEXT,
device=DEVICE,
ports=ports,
sg_rules=mock.MagicMock())
self.assertTrue(mock_logger_debug.called)
mock_add_devices_fn.assert_called_with(ports)
self.assertIn(FAKE_PORT_1, self.agent.cluster_other_ports)
self.assertNotIn(FAKE_PORT_1, self.agent.cluster_host_ports)
self.assertFalse(self.agent.devices_up_list)
self.assertTrue(mock_sg_update_fn.called)
self.assertTrue(mock_expand_sg_rules.called)
self.assertTrue(mock_prov_local_vlan.called)
def test_device_create_hosted_vm_vlan(self):
ports = [self._build_port(FAKE_PORT_1)]
self._build_phys_brs(ports[0])
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.esx_hostname = FAKE_HOST_1
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.devices_up_list = []
self.agent.vlan_manager.mapping = {}
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
) as mock_add_devices_fn, \
mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
) as mock_sg_update_fn, \
mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
return_value=FAKE_SG_RULES
) as mock_expand_sg_rules, \
mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_prov_local_vlan, \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.device_create(FAKE_CONTEXT,
device=DEVICE,
ports=ports,
sg_rules=mock.MagicMock())
self.assertTrue(mock_logger_debug.called)
self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
mock_add_devices_fn.assert_called_with(ports)
self.assertTrue(mock_sg_update_fn.called)
self.assertTrue(mock_expand_sg_rules.called)
self.assertTrue(mock_prov_local_vlan.called)
def test_device_create_hosted_vm_vlan_sg_rule_missing(self):
ports = [self._build_port(FAKE_PORT_1)]
self._build_phys_brs(ports[0])
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.esx_hostname = FAKE_HOST_1
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.devices_up_list = []
self.agent.vlan_manager.mapping = {}
self.agent.devices_to_filter = set()
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
) as mock_add_devices_fn, \
mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
) as mock_sg_update_fn, \
mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
return_value=FAKE_SG_RULES_MISSING
) as mock_expand_sg_rules, \
mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_prov_local_vlan, \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.device_create(FAKE_CONTEXT,
device=DEVICE,
ports=ports,
sg_rules=mock.MagicMock())
self.assertTrue(mock_logger_debug.called)
self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
self.assertIn(FAKE_PORT_1, self.agent.devices_to_filter)
mock_add_devices_fn.assert_called_with(ports)
self.assertFalse(mock_sg_update_fn.called)
self.assertTrue(mock_expand_sg_rules.called)
self.assertTrue(mock_prov_local_vlan.called)
def test_device_create_hosted_vm_vlan_sg_rule_partial_missing(self):
ports = [self._build_port(FAKE_PORT_1)]
self._build_phys_brs(ports[0])
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.esx_hostname = FAKE_HOST_1
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.devices_up_list = []
self.agent.devices_to_filter = set()
self.agent.vlan_manager.mapping = {}
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
) as mock_add_devices_fn, \
mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
) as mock_sg_update_fn, \
mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
return_value=FAKE_SG_RULES_PARTIAL
) as mock_expand_sg_rules, \
mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_prov_local_vlan, \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.device_create(FAKE_CONTEXT,
device=DEVICE,
ports=ports,
sg_rules=mock.MagicMock())
self.assertTrue(mock_logger_debug.called)
self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
self.assertIn(FAKE_PORT_1, self.agent.devices_to_filter)
mock_add_devices_fn.assert_called_with(ports)
self.assertFalse(mock_sg_update_fn.called)
self.assertTrue(mock_expand_sg_rules.called)
self.assertTrue(mock_prov_local_vlan.called)
def test_device_create_hosted_vm_vxlan(self):
port = self._build_port(FAKE_PORT_1)
port['network_type'] = p_const.TYPE_VXLAN
ports = [port]
self.agent.vlan_manager.mapping = {}
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.esx_hostname = FAKE_HOST_1
self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
self.agent.vlan_manager.mapping = {}
self.agent.devices_to_filter = set()
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
with mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_prov_local_vlan, \
mock.patch.object(self.agent.sg_agent,
'add_devices_to_filter'
) as mock_add_devices_fn, \
mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
) as mock_sg_update_fn, \
mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
return_value=FAKE_SG_RULES
) as mock_expand_sg_rules, \
mock.patch.object(self.agent.plugin_rpc, 'update_device_up'
) as mock_update_device_up, \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.device_create(FAKE_CONTEXT,
device=DEVICE,
ports=ports,
sg_rules=mock.MagicMock())
self.assertTrue(mock_prov_local_vlan.called)
self.assertTrue(mock_logger_debug.called)
self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
self.assertNotIn(FAKE_PORT_1, self.agent.devices_to_filter)
self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
mock_add_devices_fn.assert_called_with(ports)
self.assertTrue(mock_sg_update_fn.called)
self.assertTrue(mock_expand_sg_rules.called)
self.assertTrue(mock_update_device_up.called)
def test_device_create_hosted_vm_vxlan_sg_rule_missing(self):
port = self._build_port(FAKE_PORT_1)
port['network_type'] = p_const.TYPE_VXLAN
ports = [port]
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.esx_hostname = FAKE_HOST_1
self.agent.tenant_network_types = [p_const.TYPE_VXLAN]
self.agent.vlan_manager.mapping = {}
self.agent.devices_to_filter = set()
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
with mock.patch.object(self.agent, '_provision_local_vlan'
) as mock_prov_local_vlan, \
mock.patch.object(self.agent.sg_agent,
'add_devices_to_filter'
) as mock_add_devices_fn, \
mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
) as mock_sg_update_fn, \
mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
return_value=FAKE_SG_RULES_MISSING
) as mock_expand_sg_rules, \
mock.patch.object(self.agent.plugin_rpc, 'update_device_up'
) as mock_update_device_up, \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.device_create(FAKE_CONTEXT,
device=DEVICE,
ports=ports,
sg_rules=mock.MagicMock())
self.assertTrue(mock_prov_local_vlan.called)
self.assertTrue(mock_logger_debug.called)
self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
self.assertIn(FAKE_PORT_1, self.agent.devices_to_filter)
self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
mock_add_devices_fn.assert_called_with(ports)
self.assertFalse(mock_sg_update_fn.called)
self.assertTrue(mock_expand_sg_rules.called)
self.assertTrue(mock_update_device_up.called)
def test_device_create_hosted_vm_create_port_exception(self):
ports = [self._build_port(FAKE_PORT_1)]
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.esx_hostname = FAKE_HOST_1
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
self.agent.net_mgr.get_driver().create_port = mock.Mock(
side_effect=Exception())
with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
), \
mock.patch.object(self.agent, '_provision_local_vlan'
), \
mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
) as mock_sg_update_fn, \
mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
return_value=FAKE_SG_RULES
) as mock_expand_sg_rules, \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug, \
mock.patch.object(self.LOG, 'exception') as mock_log_excep:
self.assertRaises(
error.OVSvAppNeutronAgentError,
self.agent.device_create,
FAKE_CONTEXT, device=DEVICE,
ports=ports, sg_rules=mock.MagicMock())
self.assertTrue(mock_logger_debug.called)
self.assertNotIn(FAKE_PORT_1, self.agent.cluster_other_ports)
self.assertIn(FAKE_PORT_1, self.agent.cluster_host_ports)
self.assertFalse(mock_sg_update_fn.called)
self.assertTrue(mock_expand_sg_rules.called)
self.assertTrue(mock_log_excep.called)
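    # port_update() and device_update(): admin-state changes and ESX maintenance/shutdown handling.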
def test_port_update_admin_state_up(self):
port = self._build_port(FAKE_PORT_1)
self.agent.ports_dict[port['id']] = self.agent._build_port_info(
port)
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.cluster_host_ports = set([port['id']])
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
updated_port = self._build_update_port(FAKE_PORT_1)
updated_port['admin_state_up'] = True
self.devices_up_list = []
neutron_port = {'port': updated_port,
'segmentation_id': port['segmentation_id']}
with mock.patch.object(self.LOG, 'exception'
) as mock_log_exception, \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.port_update(FAKE_CONTEXT, **neutron_port)
self.assertEqual(neutron_port['port']['admin_state_up'],
self.agent.ports_dict[port['id']].
admin_state_up)
self.assertEqual([FAKE_PORT_1], self.agent.devices_up_list)
self.assertFalse(mock_log_exception.called)
self.assertTrue(mock_logger_debug.called)
def test_device_update_maintenance_mode(self):
kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
'esx_host_name': FAKE_HOST_1,
'assigned_agent_host': FAKE_HOST_2}}
self.agent.hostname = FAKE_HOST_2
self.agent.esx_maintenance_mode = True
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
self.agent.net_mgr.get_driver().session = "fake_session"
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.vcenter_id = FAKE_VCENTER
with mock.patch.object(resource_util,
"get_vm_mor_by_name",
return_value="vm_mor") as vm_mor_by_name, \
mock.patch.object(resource_util,
"get_host_mor_by_name",
return_value="host_mor"
) as host_mor_by_name, \
mock.patch.object(resource_util,
"set_vm_poweroff") as power_off, \
mock.patch.object(resource_util,
"set_host_into_maintenance_mode"
) as maintenance_mode, \
mock.patch.object(resource_util,
"set_host_into_shutdown_mode"
) as shutdown_mode, \
mock.patch.object(self.agent.ovsvapp_rpc,
"update_cluster_lock") as cluster_lock, \
mock.patch.object(self.LOG, 'exception') as log_exception, \
mock.patch.object(time, 'sleep'):
self.agent.device_update(FAKE_CONTEXT, **kwargs)
self.assertTrue(vm_mor_by_name.called)
self.assertTrue(host_mor_by_name.called)
self.assertTrue(power_off.called)
self.assertTrue(maintenance_mode.called)
self.assertFalse(shutdown_mode.called)
self.assertTrue(cluster_lock.called)
cluster_lock.assert_called_with(self.agent.context,
cluster_id=self.agent.cluster_id,
vcenter_id=self.agent.vcenter_id,
success=True)
self.assertFalse(log_exception.called)
def test_device_update_shutdown_mode(self):
kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
'esx_host_name': FAKE_HOST_1,
'assigned_agent_host': FAKE_HOST_2}}
self.agent.hostname = FAKE_HOST_2
self.agent.esx_maintenance_mode = False
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
self.agent.net_mgr.get_driver().session = "fake_session"
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.vcenter_id = FAKE_VCENTER
with mock.patch.object(resource_util,
"get_vm_mor_by_name",
return_value="vm_mor") as vm_mor_by_name, \
mock.patch.object(resource_util,
"get_host_mor_by_name",
return_value="host_mor"
) as host_mor_by_name, \
mock.patch.object(resource_util,
"set_vm_poweroff") as power_off, \
mock.patch.object(resource_util,
"set_host_into_maintenance_mode"
) as maintenance_mode, \
mock.patch.object(resource_util,
"set_host_into_shutdown_mode"
) as shutdown_mode, \
mock.patch.object(self.agent.ovsvapp_rpc,
"update_cluster_lock") as cluster_lock, \
mock.patch.object(self.LOG, 'exception') as log_exception, \
mock.patch.object(time, 'sleep'):
self.agent.device_update(FAKE_CONTEXT, **kwargs)
self.assertTrue(vm_mor_by_name.called)
self.assertTrue(host_mor_by_name.called)
self.assertFalse(power_off.called)
self.assertFalse(maintenance_mode.called)
self.assertTrue(shutdown_mode.called)
self.assertTrue(cluster_lock.called)
cluster_lock.assert_called_with(self.agent.context,
cluster_id=self.agent.cluster_id,
vcenter_id=self.agent.vcenter_id,
success=True)
self.assertFalse(log_exception.called)
def test_device_update_ovsvapp_alreadly_powered_off(self):
kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
'esx_host_name': FAKE_HOST_1,
'assigned_agent_host': FAKE_HOST_2}}
self.agent.hostname = FAKE_HOST_2
self.agent.esx_maintenance_mode = True
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
self.agent.net_mgr.get_driver().session = "fake_session"
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.vcenter_id = FAKE_VCENTER
with mock.patch.object(resource_util,
"get_vm_mor_by_name",
return_value="vm_mor") as vm_mor_by_name, \
mock.patch.object(resource_util,
"get_host_mor_by_name",
return_value="host_mor"
) as host_mor_by_name, \
mock.patch.object(resource_util,
"set_vm_poweroff",
side_effect=Exception()) as power_off, \
mock.patch.object(resource_util,
"set_host_into_maintenance_mode"
) as maintenance_mode, \
mock.patch.object(resource_util,
"set_host_into_shutdown_mode"
) as shutdown_mode, \
mock.patch.object(self.agent.ovsvapp_rpc,
"update_cluster_lock") as cluster_lock, \
mock.patch.object(self.LOG, 'exception') as log_exception, \
mock.patch.object(time, 'sleep'):
self.agent.device_update(FAKE_CONTEXT, **kwargs)
self.assertTrue(vm_mor_by_name.called)
self.assertTrue(host_mor_by_name.called)
self.assertTrue(power_off.called)
self.assertTrue(maintenance_mode.called)
self.assertFalse(shutdown_mode.called)
self.assertTrue(cluster_lock.called)
cluster_lock.assert_called_with(self.agent.context,
cluster_id=self.agent.cluster_id,
vcenter_id=self.agent.vcenter_id,
success=True)
self.assertTrue(log_exception.called)
def test_device_update_maintenance_mode_exception(self):
kwargs = {'device_data': {'ovsvapp_agent': 'fake_agent_host_1',
'esx_host_name': FAKE_HOST_1,
'assigned_agent_host': FAKE_HOST_2}}
self.agent.hostname = FAKE_HOST_2
self.agent.esx_maintenance_mode = True
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
self.agent.net_mgr.get_driver().session = "fake_session"
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.vcenter_id = FAKE_VCENTER
with mock.patch.object(resource_util,
"get_vm_mor_by_name",
return_value="vm_mor") as vm_mor_by_name, \
mock.patch.object(resource_util,
"get_host_mor_by_name",
return_value="host_mor"
) as host_mor_by_name, \
mock.patch.object(resource_util,
"set_vm_poweroff",
side_effect=Exception()) as power_off, \
mock.patch.object(resource_util,
"set_host_into_maintenance_mode",
side_effect=Exception()
) as maintenance_mode, \
mock.patch.object(resource_util,
"set_host_into_shutdown_mode"
) as shutdown_mode, \
mock.patch.object(self.agent.ovsvapp_rpc,
"update_cluster_lock") as cluster_lock, \
mock.patch.object(self.LOG, 'exception') as log_exception, \
mock.patch.object(time, 'sleep') as time_sleep:
self.agent.device_update(FAKE_CONTEXT, **kwargs)
self.assertTrue(vm_mor_by_name.called)
self.assertTrue(host_mor_by_name.called)
self.assertTrue(power_off.called)
self.assertTrue(maintenance_mode.called)
self.assertFalse(shutdown_mode.called)
self.assertTrue(cluster_lock.called)
cluster_lock.assert_called_with(self.agent.context,
cluster_id=self.agent.cluster_id,
vcenter_id=self.agent.vcenter_id,
success=False)
self.assertTrue(log_exception.called)
self.assertTrue(time_sleep.called)
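    # enhanced_sg_provider_updated() notification and device_create() across multiple physical networks.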
def test_enhanced_sg_provider_updated(self):
kwargs = {'network_id': NETWORK_ID}
with mock.patch.object(self.LOG, 'info') as log_info, \
mock.patch.object(self.agent.sg_agent, "sg_provider_updated"
) as mock_sg_provider_updated:
self.agent.enhanced_sg_provider_updated(FAKE_CONTEXT, **kwargs)
self.assertTrue(log_info.called)
mock_sg_provider_updated.assert_called_with(NETWORK_ID)
def test_device_create_hosted_vm_vlan_multiple_physnet(self):
port1 = self._build_port(FAKE_PORT_1)
port2 = self._build_port(FAKE_PORT_2)
port2['physical_network'] = "physnet2"
port2['segmentation_id'] = "2005"
port2['network_id'] = "fake_net2"
ports = [port1, port2]
self._build_phys_brs(port1)
self._build_phys_brs(port2)
self.agent.phys_ofports = {}
self.agent.phys_ofports[port1['physical_network']] = 4
self.agent.phys_ofports[port2['physical_network']] = 5
self.agent.vcenter_id = FAKE_VCENTER
self.agent.cluster_id = FAKE_CLUSTER_1
self.agent.cluster_moid = FAKE_CLUSTER_MOID
self.agent.esx_hostname = FAKE_HOST_1
self.agent.tenant_network_types = [p_const.TYPE_VLAN]
self.agent.devices_up_list = []
self.agent.net_mgr = fake_manager.MockNetworkManager("callback")
self.agent.net_mgr.initialize_driver()
self.agent.int_br = mock.Mock()
self.agent.vlan_manager.mapping = {}
self.agent.patch_sec_ofport = 1
self.agent.int_ofports = {'physnet1': 2, 'physnet2': 3}
with mock.patch.object(self.agent.sg_agent, 'add_devices_to_filter'
) as mock_add_devices_fn, \
mock.patch.object(self.agent.sg_agent, 'ovsvapp_sg_update'
), \
mock.patch.object(self.agent.int_br, 'provision_local_vlan'
) as mock_prov_local_vlan, \
mock.patch.object(self.agent.sg_agent, 'expand_sg_rules',
return_value=FAKE_SG_RULES_MULTI_PORTS
), \
mock.patch.object(self.LOG, 'debug') as mock_logger_debug:
self.agent.device_create(FAKE_CONTEXT,
device=DEVICE,
ports=ports,
sg_rules=mock.MagicMock())
self.assertTrue(mock_logger_debug.called)
self.assertEqual([FAKE_PORT_1, FAKE_PORT_2],
self.agent.devices_up_list)
mock_add_devices_fn.assert_called_with(ports)
self.assertTrue(mock_prov_local_vlan.called)
mock_prov_local_vlan.assert_any_call(
port1['network_type'],
port1['lvid'],
port1['segmentation_id'],
self.agent.patch_sec_ofport,
self.agent.int_ofports['physnet1'], None)
mock_prov_local_vlan.assert_any_call(
port2['network_type'],
port2['lvid'],
port2['segmentation_id'],
self.agent.patch_sec_ofport,
self.agent.int_ofports['physnet2'], None)
| 52.54364
| 96
| 0.594227
| 12,944
| 113,179
| 4.812809
| 0.03492
| 0.10344
| 0.067419
| 0.074723
| 0.91311
| 0.880476
| 0.861133
| 0.831164
| 0.802559
| 0.783649
| 0
| 0.010182
| 0.318822
| 113,179
| 2,153
| 97
| 52.568045
| 0.797873
| 0.005619
| 0
| 0.752343
| 0
| 0
| 0.090015
| 0.038089
| 0
| 0
| 0.000027
| 0
| 0.189936
| 1
| 0.043907
| false
| 0
| 0.007893
| 0.000493
| 0.057227
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e7ee8f88cffe1a482d5fa7391195738c0119a53d
| 2,228
|
py
|
Python
|
SQLFileGenerator/sqlqueries.py
|
DataMadeEasy/PySQLFileGenerator
|
3efc54fa7b8741f48d00dc199675081b0fc4e04d
|
[
"BSD-2-Clause"
] | null | null | null |
SQLFileGenerator/sqlqueries.py
|
DataMadeEasy/PySQLFileGenerator
|
3efc54fa7b8741f48d00dc199675081b0fc4e04d
|
[
"BSD-2-Clause"
] | null | null | null |
SQLFileGenerator/sqlqueries.py
|
DataMadeEasy/PySQLFileGenerator
|
3efc54fa7b8741f48d00dc199675081b0fc4e04d
|
[
"BSD-2-Clause"
] | null | null | null |
sqlqueries = {
'WeatherForecast':"select concat ('FY', to_char(f.forecasted_timestamp, 'YY')) Fiscal_yr, to_char(f.forecasted_timestamp, 'MON') Fiscal_mth, concat ('Day_', to_char(f.forecasted_timestamp, 'DD')) Fiscal_day, f.zipcode zip, min(f.temp_avg) low, max(f.temp_avg) high, max(f.wind_speed) wind, max(f.humidity) humidity from forecast f where to_char(forecast_timestamp, 'YYYY-MM-DD HH24') = (select max(to_char(forecast_timestamp, 'YYYY-MM-DD HH24')) from forecast) group by to_char(f.forecasted_timestamp, 'YY'), to_char(f.forecasted_timestamp, 'MON'), to_char(f.forecasted_timestamp, 'DD'), f.zipcode;",
'WeatherActDesc':"select concat ('FY', to_char(o.observation_timestamp, 'YY')) Fiscal_yr, to_char(o.observation_timestamp, 'MON') Fiscal_mth, concat ('Day_', to_char(o.observation_timestamp, 'DD')) Fiscal_day, o.zipcode zip, o.weather_description descripion from observations o group by to_char(o.observation_timestamp, 'YY'), to_char(o.observation_timestamp, 'MON'), to_char(o.observation_timestamp, 'DD'), o.zipcode, o.weather_description order by fiscal_yr, fiscal_mth, fiscal_day, zip;",
'WeatherActual':"select concat ('FY', to_char(o.observation_timestamp, 'YY')) Fiscal_yr, to_char(o.observation_timestamp, 'MON') Fiscal_mth, concat ('Day_', to_char(o.observation_timestamp, 'DD')) Fiscal_day, o.zipcode zip, min(o.temp_avg) low, max(o.temp_avg) high, max(o.wind_speed) wind, max(o.humidity) humidity from observations o group by to_char(o.observation_timestamp, 'YY'), to_char(o.observation_timestamp, 'MON') , to_char(o.observation_timestamp, 'DD') , o.zipcode order by fiscal_yr, fiscal_mth, fiscal_day, zip;",
'WeatherDescription':"select concat ('FY', to_char(f.forecasted_timestamp, 'YY')) Fiscal_yr , to_char(f.forecasted_timestamp, 'MON') Fiscal_mth , concat ('Day_', to_char(f.forecasted_timestamp, 'DD')) Fiscal_day , f.zipcode zip , f.weather_description descripion from forecast f where to_char(forecast_timestamp, 'YYYY-MM-DD HH24') = (select max(to_char(forecast_timestamp, 'YYYY-MM-DD HH24')) from forecast) group by to_char(forecasted_timestamp, 'YY') , to_char(f.forecasted_timestamp, 'MON') , to_char(f.forecasted_timestamp, 'DD') , f.zipcode , f.weather_description;"
}
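# Minimal usage sketch (not part of the original module): dump each query above to its
# own .sql file, one per report name. The 'generated_sql' output directory name is an
# assumption for illustration only.
if __name__ == '__main__':
    import os
    out_dir = 'generated_sql'  # hypothetical output location
    os.makedirs(out_dir, exist_ok=True)
    for name, query in sqlqueries.items():
        with open(os.path.join(out_dir, name + '.sql'), 'w', encoding='utf-8') as fh:
            fh.write(query + '\n')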
| 371.333333
| 604
| 0.763465
| 344
| 2,228
| 4.688953
| 0.133721
| 0.104154
| 0.052077
| 0.133912
| 0.815251
| 0.815251
| 0.814631
| 0.814631
| 0.814631
| 0.769994
| 0
| 0.003941
| 0.088869
| 2,228
| 6
| 605
| 371.333333
| 0.79064
| 0
| 0
| 0
| 0
| 0.666667
| 0.973082
| 0.428443
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
f06f2cf97d8da48c7ae640dd4974c12d832537f5
| 3,398
|
py
|
Python
|
njdate/hebdfind.py
|
schorrm/njdate
|
5a31d944973904b75f1dbac811fc7393aaa4ed7c
|
[
"MIT"
] | 4
|
2019-07-16T19:58:42.000Z
|
2021-11-17T14:50:17.000Z
|
njdate/hebdfind.py
|
schorrm/njdate
|
5a31d944973904b75f1dbac811fc7393aaa4ed7c
|
[
"MIT"
] | null | null | null |
njdate/hebdfind.py
|
schorrm/njdate
|
5a31d944973904b75f1dbac811fc7393aaa4ed7c
|
[
"MIT"
] | null | null | null |
# Takes two years, and runs an aggressive search for dates in between those two years (inclusive).
import njdate.gematria as gematria
import njdate.ej_generic as ej_generic
import string
specpunc = string.punctuation.replace('"','').replace("'","")
tr_table = str.maketrans("","",specpunc)
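# The translation table above strips all punctuation except single and double quote marks; it is used to tokenize the search text below.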
def date_aggressor (search_text, begin_year, end_year):
tokens = search_text.translate(tr_table).split()
for search_year in range (begin_year, end_year+1):
if gematria.YearNoToGematria(search_year) in tokens:
query = ' '.join(search_text.partition(gematria.YearNoToGematria(search_year))[:2])
return ej_generic.ExtractDate(query)
if gematria.YearNoToGematria(search_year, False) in tokens:
query = ' '.join(search_text.partition(gematria.YearNoToGematria(search_year, False))[:2])
return ej_generic.ExtractDate(query)
if gematria.YearNoToGematria(search_year, prepend_heh=True) in tokens:
            query = ' '.join(search_text.partition(gematria.YearNoToGematria(search_year, prepend_heh=True))[:2])
return ej_generic.ExtractDate(query)
return None
def date_aggressor_lamedify (search_text, begin_year, end_year):
tokens = search_text.translate(tr_table).split()
for search_year in range (begin_year, end_year+1):
if gematria.YearNoToGematria(search_year) in tokens:
query = ' '.join(search_text.partition(gematria.YearNoToGematria(search_year))[:2])
return ej_generic.ExtractDate(query)
if gematria.YearNoToGematria(search_year, False) in tokens:
query = ' '.join(search_text.partition(gematria.YearNoToGematria(search_year, False))[:2])
return ej_generic.ExtractDate(query)
if gematria.YearNoToGematria(search_year, False, False) + '"ל' in tokens:
query = ' '.join(search_text.partition(gematria.YearNoToGematria(search_year, False, False))[:2])
return ej_generic.ExtractDate(query)
return None
# For dropped Tafs etc., where we need to add the missing 400 years back onto whatever year we find.
def yshift_date_aggressor (search_text, begin_year, end_year, shift=400):
    # Note: begin_year and end_year are shifted down by `shift` before the search, so callers pass the same range as date_aggressor.
begin_year -= shift
end_year -= shift
tokens = search_text.translate(tr_table).split()
for search_year in range (begin_year, end_year+1):
if gematria.YearNoToGematria(search_year) in tokens:
query = ' '.join(search_text.partition(gematria.YearNoToGematria(search_year))[:2])
return ej_generic.ForceYear(query, search_year+shift)
if gematria.YearNoToGematria(search_year, False) in tokens:
query = ' '.join(search_text.partition(gematria.YearNoToGematria(search_year, False))[:2])
return ej_generic.ForceYear(query, search_year+shift)
return None
def yshift_date_aggressor_lamedify (search_text, begin_year, end_year, shift=400):
tokens = search_text.translate(tr_table).split()
for search_year in range (begin_year, end_year+1):
if gematria.YearNoToGematria(search_year, False, False) + '"ל' in tokens:
query = ' '.join(search_text.partition(gematria.YearNoToGematria(search_year, False, False))[:2])
return ej_generic.ForceYear(query, search_year+shift)
return None
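A hedged usage sketch, assuming the njdate package is importable; the input text and year range below are placeholders:
# Hypothetical usage sketch -- the text and the 5700-5790 range are placeholders.
from njdate import hebdfind

text = "scanned title-page text that may contain a Hebrew year"
hit = hebdfind.date_aggressor(text, 5700, 5790)
if hit is None:
    # fall back to the shifted search used for dropped-Taf years (same call signature)
    hit = hebdfind.yshift_date_aggressor(text, 5700, 5790)
print(hit)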
| 57.59322
| 110
| 0.700706
| 428
| 3,398
| 5.364486
| 0.184579
| 0.11324
| 0.235192
| 0.266551
| 0.819686
| 0.819686
| 0.819686
| 0.819686
| 0.79007
| 0.732143
| 0
| 0.009154
| 0.196292
| 3,398
| 58
| 111
| 58.586207
| 0.831564
| 0.079164
| 0
| 0.76
| 0
| 0
| 0.004891
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0
| 0.06
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
f0706f06dae68a2eb12befe8740b73ce25344c53
| 10,323
|
py
|
Python
|
tests/test_cli.py
|
redglue/brickops
|
77fbe0da295f69b2b8bfebd0ec2c8b3bfdb1046b
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_cli.py
|
redglue/brickops
|
77fbe0da295f69b2b8bfebd0ec2c8b3bfdb1046b
|
[
"BSD-3-Clause"
] | 3
|
2019-07-23T16:38:14.000Z
|
2021-06-02T03:55:23.000Z
|
tests/test_cli.py
|
aquicore/apparate
|
bc0d9a5db2ffb863ddde4ff61ac2ac0dbc8f1bad
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
from os.path import expanduser, join
from unittest import mock
import pytest
from click.testing import CliRunner
from configparser import ConfigParser
from apparate.configure import configure
from apparate.cli_commands import upload, upload_and_update
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('apparate.cli_commands')
def test_configure_no_existing_config():
expected_stdout = (
'Databricks host (e.g. https://my-organization.cloud.databricks.com): '
'https://test_host\n'
'Databricks API token: \n'
'Repeat for confirmation: \n'
'Databricks folder for production libraries: test_folder\n'
)
filename = join(expanduser('~'), '.apparatecfg')
expected_call_list = [
mock.call(filename, encoding=None),
mock.call(filename, 'w+'),
mock.call().write('[DEFAULT]\n'),
mock.call().write('host = https://test_host\n'),
mock.call().write('token = test_token\n'),
mock.call().write('prod_folder = test_folder\n'),
mock.call().write('\n'),
]
with mock.patch('builtins.open', mock.mock_open(read_data='')) as m_open:
runner = CliRunner()
result = runner.invoke(
configure,
input=(
'https://test_host\n'
'test_token\n'
'test_token\n'
'test_folder\n'
),
)
m_open.assert_has_calls(expected_call_list, any_order=True)
assert not result.exception
assert result.output == expected_stdout
def test_configure_extra_slash_in_host():
expected_stdout = (
'Databricks host (e.g. https://my-organization.cloud.databricks.com): '
'https://test_host/\n'
'Databricks API token: \n'
'Repeat for confirmation: \n'
'Databricks folder for production libraries: test_folder\n'
)
filename = join(expanduser('~'), '.apparatecfg')
expected_call_list = [
mock.call(filename, encoding=None),
mock.call(filename, 'w+'),
mock.call().write('[DEFAULT]\n'),
mock.call().write('host = https://test_host\n'),
mock.call().write('token = test_token\n'),
mock.call().write('prod_folder = test_folder\n'),
mock.call().write('\n'),
]
with mock.patch('builtins.open', mock.mock_open(read_data='')) as m_open:
runner = CliRunner()
result = runner.invoke(
configure,
input=(
'https://test_host/\n'
'test_token\n'
'test_token\n'
'test_folder\n'
),
)
m_open.assert_has_calls(expected_call_list, any_order=True)
assert not result.exception
assert result.output == expected_stdout
def test_configure_extra_slash_in_folder():
expected_stdout = (
'Databricks host (e.g. https://my-organization.cloud.databricks.com): '
'https://test_host\n'
'Databricks API token: \n'
'Repeat for confirmation: \n'
'Databricks folder for production libraries: test_folder/\n'
)
filename = join(expanduser('~'), '.apparatecfg')
expected_call_list = [
mock.call(filename, encoding=None),
mock.call(filename, 'w+'),
mock.call().write('[DEFAULT]\n'),
mock.call().write('host = https://test_host\n'),
mock.call().write('token = test_token\n'),
mock.call().write('prod_folder = test_folder\n'),
mock.call().write('\n'),
]
with mock.patch('builtins.open', mock.mock_open(read_data='')) as m_open:
runner = CliRunner()
result = runner.invoke(
configure,
input=(
'https://test_host\n'
'test_token\n'
'test_token\n'
'test_folder/\n'
),
)
m_open.assert_has_calls(expected_call_list, any_order=True)
assert not result.exception
assert result.output == expected_stdout
def test_configure_no_http_in_host():
expected_stdout = (
'Databricks host (e.g. https://my-organization.cloud.databricks.com): '
'test_host\n'
"looks like there's an issue - make sure the host name starts "
'with http: https://test_host\n'
'Databricks API token: \n'
'Repeat for confirmation: \n'
'Databricks folder for production libraries: test_folder\n'
)
filename = join(expanduser('~'), '.apparatecfg')
expected_call_list = [
mock.call(filename, encoding=None),
mock.call(filename, 'w+'),
mock.call().write('[DEFAULT]\n'),
mock.call().write('host = https://test_host\n'),
mock.call().write('token = test_token\n'),
mock.call().write('prod_folder = test_folder\n'),
mock.call().write('\n'),
]
with mock.patch('builtins.open', mock.mock_open(read_data='')) as m_open:
runner = CliRunner()
result = runner.invoke(
configure,
input=(
'test_host\n'
'https://test_host\n'
'test_token\n'
'test_token\n'
'test_folder\n'
),
)
m_open.assert_has_calls(expected_call_list, any_order=True)
assert not result.exception
assert result.output == expected_stdout
@pytest.mark.fixture('existing_config')
@mock.patch('apparate.cli_commands._load_config')
@mock.patch('apparate.cli_commands.update_databricks')
def test_upload(update_databricks_mock, config_mock, existing_config):
config_mock.return_value = existing_config
runner = CliRunner()
result = runner.invoke(
upload,
['--path', '/path/to/egg']
)
config_mock.assert_called_once()
update_databricks_mock.assert_called_with(
logger,
'/path/to/egg',
'test_token',
'test_folder',
cleanup=False,
update_jobs=False,
)
assert not result.exception
@pytest.mark.fixture('existing_config')
@mock.patch('apparate.cli_commands._load_config')
@mock.patch('apparate.cli_commands.update_databricks')
def test_upload_all_options(
update_databricks_mock,
config_mock,
existing_config
):
config_mock.return_value = existing_config
runner = CliRunner()
result = runner.invoke(
upload,
[
'--path',
'/path/to/egg',
'--token',
'new_token',
'--folder',
'new_folder'
]
)
config_mock.assert_called_once()
update_databricks_mock.assert_called_with(
logger,
'/path/to/egg',
'new_token',
'new_folder',
cleanup=False,
update_jobs=False,
)
assert not result.exception
@pytest.mark.fixture('empty_config')
@mock.patch('apparate.cli_commands._load_config')
def test_upload_missing_token(config_mock, empty_config):
config_mock.return_value = empty_config
runner = CliRunner()
result = runner.invoke(
upload,
['--path', '/path/to/egg', '--folder', 'test_folder']
)
assert str(result.exception) == (
'no token found - either provide a command line argument or set up'
' a default by running `apparate configure`'
)
@pytest.mark.fixture('empty_config')
@mock.patch('apparate.cli_commands._load_config')
def test_upload_missing_folder(config_mock, empty_config):
config_mock.return_value = empty_config
runner = CliRunner()
result = runner.invoke(
upload,
['--path', '/path/to/egg', '--token', 'test_token']
)
assert str(result.exception) == (
'no folder found - either provide a command line argument or set up'
' a default by running `apparate configure`'
)
@pytest.mark.fixture('existing_config')
@mock.patch('apparate.cli_commands._load_config')
@mock.patch('apparate.cli_commands.update_databricks')
def test_upload_and_update_cleanup(
update_databricks_mock,
config_mock,
existing_config
):
config_mock.return_value = existing_config
runner = CliRunner()
result = runner.invoke(
upload_and_update,
['--path', '/path/to/egg']
)
config_mock.assert_called_once()
update_databricks_mock.assert_called_with(
logger,
'/path/to/egg',
'test_token',
'test_folder',
cleanup=True,
update_jobs=True,
)
assert not result.exception
@pytest.mark.fixture('existing_config')
@mock.patch('apparate.cli_commands._load_config')
@mock.patch('apparate.cli_commands.update_databricks')
def test_upload_and_update_no_cleanup(
update_databricks_mock,
config_mock,
existing_config
):
config_mock.return_value = existing_config
runner = CliRunner()
result = runner.invoke(
upload_and_update,
['--path', '/path/to/egg', '--no-cleanup']
)
config_mock.assert_called_once()
update_databricks_mock.assert_called_with(
logger,
'/path/to/egg',
'test_token',
'test_folder',
cleanup=False,
update_jobs=True,
)
assert not result.exception
@mock.patch('apparate.cli_commands._load_config')
def test_upload_and_update_missing_token(config_mock):
existing_config = ConfigParser()
existing_config['DEFAULT'] = {'prod_folder': 'test_folder'}
config_mock.return_value = existing_config
runner = CliRunner()
result = runner.invoke(
upload_and_update,
['--path', '/path/to/egg']
)
config_mock.assert_called_once()
assert str(result.exception) == (
'no token found - either provide a command line argument or set up'
' a default by running `apparate configure`'
)
@pytest.mark.fixture('empty_config')
@mock.patch('apparate.cli_commands._load_config')
def test_upload_and_update_missing_folder(config_mock, empty_config):
config_mock.return_value = empty_config
runner = CliRunner()
result = runner.invoke(
upload_and_update,
['-p', '/path/to/egg', '--token', 'test_token']
)
config_mock.assert_called_once()
assert str(result.exception) == (
'no folder found - either provide a command line argument or set up'
' a default by running `apparate configure`'
)
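The tests above request existing_config and empty_config fixtures that are not defined in this file; below is a minimal conftest.py sketch that would satisfy them (the real repository's fixtures may differ):
# Hypothetical conftest.py sketch -- the actual fixtures in the repository may differ.
import pytest
from configparser import ConfigParser


@pytest.fixture
def existing_config():
    config = ConfigParser()
    config['DEFAULT'] = {
        'host': 'https://test_host',
        'token': 'test_token',
        'prod_folder': 'test_folder',
    }
    return config


@pytest.fixture
def empty_config():
    return ConfigParser()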
| 28.675
| 79
| 0.625303
| 1,206
| 10,323
| 5.107794
| 0.106136
| 0.053571
| 0.042208
| 0.036364
| 0.905357
| 0.905357
| 0.900162
| 0.893344
| 0.893344
| 0.891883
| 0
| 0
| 0.248087
| 10,323
| 359
| 80
| 28.754875
| 0.79361
| 0
| 0
| 0.769492
| 0
| 0
| 0.284317
| 0.043495
| 0
| 0
| 0
| 0
| 0.101695
| 1
| 0.040678
| false
| 0
| 0.027119
| 0
| 0.067797
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f076aaf49a3d8fba6fb5ba17c6020bb113d2de01
| 5,417
|
py
|
Python
|
src/jsonengine/main.py
|
youhengzhou/json-crud-engine
|
8ee614af6dddbe1236a78a7debf71048f476a3ff
|
[
"MIT"
] | 2
|
2021-07-02T04:33:36.000Z
|
2022-01-09T23:40:30.000Z
|
src/jsonengine/main.py
|
youhengzhou/json-crud-engine
|
8ee614af6dddbe1236a78a7debf71048f476a3ff
|
[
"MIT"
] | null | null | null |
src/jsonengine/main.py
|
youhengzhou/json-crud-engine
|
8ee614af6dddbe1236a78a7debf71048f476a3ff
|
[
"MIT"
] | null | null | null |
# JSON engine 21 9 16
# database: eng.json
# engine:   eng.py
import os
import json
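# NOTE: paths are assembled with Windows-style '\\' separators; all data lives under <cwd>\json_engine_database\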
path = os.getcwd() + '\\json_engine_database\\'
path_string = ''
def set_path(string):
global path
path = os.getcwd() + string
def dictionary_kv(dictionary, key, value):
dictionary[key] = value
return dictionary
def set_path_string(args,create_flag):
global path_string
if (args):
path_string = str(args[0]) + '\\'
if os.path.exists(path + path_string)==False:
if create_flag == True:
os.makedirs(path + path_string)
else:
return False
return path_string
def create(dictionary, *args):
path_string = set_path_string(args,True)
with open(path + path_string + 'eng.json', 'w') as outfile:
json.dump(dictionary, outfile, indent=4)
def retrieve(*args):
path_string = set_path_string(args,False)
if path_string == False:
return False
with open(path + path_string + 'eng.json', 'r') as f:
return(json.load(f))
def retrieve_k(key, *args):
path_string = set_path_string(args,False)
if path_string == False:
return False
with open(path + path_string + 'eng.json', 'r') as f:
if key in json.load(f):
with open(path + path_string + 'eng.json', 'r') as f:
return(json.load(f)[key])
else:
return False
def update(dictionary, *args):
path_string = set_path_string(args,False)
if path_string == False:
return False
with open(path + path_string + 'eng.json', 'w') as outfile:
json.dump(dictionary, outfile, indent=4)
return True
def update_kv(key, value, *args):
path_string = set_path_string(args,False)
if path_string == False:
return False
with open(path + path_string + 'eng.json', 'w') as outfile:
json.dump({key: value}, outfile, indent=4)
return True
def patch(dictionary, *args):
path_string = set_path_string(args,False)
if path_string == False:
return False
with open(path + path_string + 'eng.json', 'r') as f:
data=(json.load(f))
data.update(dictionary)
with open(path + path_string + 'eng.json', 'w') as outfile:
json.dump(data, outfile, indent=4)
return True
def patch_kv(key, value, *args):
path_string = set_path_string(args,False)
if path_string == False:
return False
with open(path + path_string + 'eng.json', 'r') as f:
data=(json.load(f))
data.update({key: value})
with open(path + path_string + 'eng.json', 'w') as outfile:
json.dump(data, outfile, indent=4)
return True
def delete(*args):
if (args):
path_string = str(args[0]) + '\\'
if os.path.exists(path + path_string + 'eng.json'):
os.remove(path + path_string + 'eng.json')
os.rmdir(path + path_string)
return True
else:
return False
def delete_k(key, *args):
if (args):
path_string = str(args[0]) + '\\'
if os.path.exists(path + path_string + 'eng.json'):
        with open(path + path_string + 'eng.json', 'r') as f:
            data = json.load(f)  # load once; the file handle cannot be read a second time
            if key in data:
                data.pop(key)
                with open(path + path_string + 'eng.json', 'w') as outfile:
                    json.dump(data, outfile, indent=4)
                return True
            else:
                return False
else:
return False
def display(*args):
if (args):
path_string = str(args[0]) + '\\'
if os.path.exists(path + path_string + 'eng.json'):
with open(path + path_string + 'eng.json', 'r') as f:
print(json.load(f))
return True
else:
print('The selected file does not exist')
return False
def display_key(key, *args):
if (args):
path_string = str(args[0]) + '\\'
if os.path.exists(path + path_string + 'eng.json'):
        with open(path + path_string + 'eng.json', 'r') as f:
            data = json.load(f)  # load once; the file handle cannot be read a second time
            if key in data:
                print(key + ' ' + str(data[key]))
return True
else:
print('The selected file does not exist')
return False
def display_nkv(key, *args):
if (args):
path_string = str(args[0]) + '\\'
if os.path.exists(path + path_string + 'eng.json'):
with open(path + path_string + 'eng.json', 'r') as f:
            data = json.load(f)  # load once; the file handle cannot be read a second time
            if key in data:
data.pop(key,'key not found')
print(data)
return True
else:
print('The selected file does not exist')
return False
def display_ind(*args):
if (args):
path_string = str(args[0]) + '\\'
if os.path.exists(path + path_string + 'eng.json'):
with open(path + path_string + 'eng.json', 'r') as f:
print(json.dumps(json.load(f), indent=4))
else:
print('The selected file does not exist')
def display_ind_nkv(key, *args):
if (args):
path_string = str(args[0]) + '\\'
if os.path.exists(path + path_string + 'eng.json'):
with open(path + path_string + 'eng.json', 'r') as f:
data = json.load(f)
data.pop(key,'key not found')
print(json.dumps(data, indent=4))
else:
print('The selected file does not exist')
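A short usage sketch for the module above, assuming it is importable as main and running on Windows (the backslash separators come from the module itself); names and values are placeholders:
# Hypothetical usage sketch -- assumes this file is importable as `main`.
import main

main.create({"name": "Ada", "score": 10}, "players")   # writes json_engine_database\players\eng.json
print(main.retrieve("players"))                        # {'name': 'Ada', 'score': 10}
main.patch_kv("level", 2, "players")                   # merge one key/value into the stored dict
print(main.retrieve_k("level", "players"))             # 2
main.delete("players")                                 # remove eng.json and its folder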
| 31.132184
| 75
| 0.568027
| 741
| 5,417
| 4.040486
| 0.087719
| 0.203741
| 0.130929
| 0.141951
| 0.808283
| 0.794923
| 0.782231
| 0.769873
| 0.756179
| 0.756179
| 0
| 0.005523
| 0.298136
| 5,417
| 173
| 76
| 31.312139
| 0.781957
| 0.012368
| 0
| 0.706667
| 0
| 0
| 0.083084
| 0.004491
| 0
| 0
| 0
| 0
| 0
| 1
| 0.113333
| false
| 0
| 0.013333
| 0
| 0.293333
| 0.066667
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b2bb1c7a2af64e0803771a48f87683d4a4a1c0d2
| 50,483
|
py
|
Python
|
cottonformation/res/lookoutmetrics.py
|
gitter-badger/cottonformation-project
|
354f1dce7ea106e209af2d5d818b6033a27c193c
|
[
"BSD-2-Clause"
] | null | null | null |
cottonformation/res/lookoutmetrics.py
|
gitter-badger/cottonformation-project
|
354f1dce7ea106e209af2d5d818b6033a27c193c
|
[
"BSD-2-Clause"
] | null | null | null |
cottonformation/res/lookoutmetrics.py
|
gitter-badger/cottonformation-project
|
354f1dce7ea106e209af2d5d818b6033a27c193c
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
This module
"""
import attr
import typing
from ..core.model import (
Property, Resource, Tag, GetAtt, TypeHint, TypeCheck,
)
from ..core.constant import AttrMeta
#--- Property declaration ---
@attr.s
class AnomalyDetectorCsvFormatDescriptor(Property):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector.CsvFormatDescriptor"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-csvformatdescriptor.html
Property Document:
- ``p_Charset``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-csvformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-csvformatdescriptor-charset
- ``p_ContainsHeader``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-csvformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-csvformatdescriptor-containsheader
- ``p_Delimiter``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-csvformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-csvformatdescriptor-delimiter
- ``p_FileCompression``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-csvformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-csvformatdescriptor-filecompression
- ``p_HeaderList``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-csvformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-csvformatdescriptor-headerlist
- ``p_QuoteSymbol``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-csvformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-csvformatdescriptor-quotesymbol
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector.CsvFormatDescriptor"
p_Charset: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Charset"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-csvformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-csvformatdescriptor-charset"""
p_ContainsHeader: bool = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(bool)),
metadata={AttrMeta.PROPERTY_NAME: "ContainsHeader"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-csvformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-csvformatdescriptor-containsheader"""
p_Delimiter: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Delimiter"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-csvformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-csvformatdescriptor-delimiter"""
p_FileCompression: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "FileCompression"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-csvformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-csvformatdescriptor-filecompression"""
p_HeaderList: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "HeaderList"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-csvformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-csvformatdescriptor-headerlist"""
p_QuoteSymbol: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "QuoteSymbol"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-csvformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-csvformatdescriptor-quotesymbol"""
@attr.s
class AnomalyDetectorVpcConfiguration(Property):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector.VpcConfiguration"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-vpcconfiguration.html
Property Document:
- ``rp_SecurityGroupIdList``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-vpcconfiguration.html#cfn-lookoutmetrics-anomalydetector-vpcconfiguration-securitygroupidlist
- ``rp_SubnetIdList``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-vpcconfiguration.html#cfn-lookoutmetrics-anomalydetector-vpcconfiguration-subnetidlist
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector.VpcConfiguration"
rp_SecurityGroupIdList: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "SecurityGroupIdList"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-vpcconfiguration.html#cfn-lookoutmetrics-anomalydetector-vpcconfiguration-securitygroupidlist"""
rp_SubnetIdList: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "SubnetIdList"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-vpcconfiguration.html#cfn-lookoutmetrics-anomalydetector-vpcconfiguration-subnetidlist"""
@attr.s
class AnomalyDetectorRDSSourceConfig(Property):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector.RDSSourceConfig"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html
Property Document:
- ``rp_DBInstanceIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-dbinstanceidentifier
- ``rp_DatabaseHost``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-databasehost
- ``rp_DatabaseName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-databasename
- ``rp_DatabasePort``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-databaseport
- ``rp_RoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-rolearn
- ``rp_SecretManagerArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-secretmanagerarn
- ``rp_TableName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-tablename
- ``rp_VpcConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-vpcconfiguration
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector.RDSSourceConfig"
rp_DBInstanceIdentifier: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DBInstanceIdentifier"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-dbinstanceidentifier"""
rp_DatabaseHost: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DatabaseHost"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-databasehost"""
rp_DatabaseName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DatabaseName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-databasename"""
rp_DatabasePort: int = attr.ib(
default=None,
validator=attr.validators.instance_of(int),
metadata={AttrMeta.PROPERTY_NAME: "DatabasePort"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-databaseport"""
rp_RoleArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "RoleArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-rolearn"""
rp_SecretManagerArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "SecretManagerArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-secretmanagerarn"""
rp_TableName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "TableName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-tablename"""
rp_VpcConfiguration: typing.Union['AnomalyDetectorVpcConfiguration', dict] = attr.ib(
default=None,
converter=AnomalyDetectorVpcConfiguration.from_dict,
validator=attr.validators.instance_of(AnomalyDetectorVpcConfiguration),
metadata={AttrMeta.PROPERTY_NAME: "VpcConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-rdssourceconfig.html#cfn-lookoutmetrics-anomalydetector-rdssourceconfig-vpcconfiguration"""
@attr.s
class AnomalyDetectorTimestampColumn(Property):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector.TimestampColumn"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-timestampcolumn.html
Property Document:
- ``p_ColumnFormat``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-timestampcolumn.html#cfn-lookoutmetrics-anomalydetector-timestampcolumn-columnformat
- ``p_ColumnName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-timestampcolumn.html#cfn-lookoutmetrics-anomalydetector-timestampcolumn-columnname
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector.TimestampColumn"
p_ColumnFormat: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ColumnFormat"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-timestampcolumn.html#cfn-lookoutmetrics-anomalydetector-timestampcolumn-columnformat"""
p_ColumnName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "ColumnName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-timestampcolumn.html#cfn-lookoutmetrics-anomalydetector-timestampcolumn-columnname"""
@attr.s
class AnomalyDetectorJsonFormatDescriptor(Property):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector.JsonFormatDescriptor"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-jsonformatdescriptor.html
Property Document:
- ``p_Charset``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-jsonformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-jsonformatdescriptor-charset
- ``p_FileCompression``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-jsonformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-jsonformatdescriptor-filecompression
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector.JsonFormatDescriptor"
p_Charset: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Charset"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-jsonformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-jsonformatdescriptor-charset"""
p_FileCompression: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "FileCompression"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-jsonformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-jsonformatdescriptor-filecompression"""
@attr.s
class AnomalyDetectorAppFlowConfig(Property):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector.AppFlowConfig"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-appflowconfig.html
Property Document:
- ``rp_FlowName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-appflowconfig.html#cfn-lookoutmetrics-anomalydetector-appflowconfig-flowname
- ``rp_RoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-appflowconfig.html#cfn-lookoutmetrics-anomalydetector-appflowconfig-rolearn
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector.AppFlowConfig"
rp_FlowName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "FlowName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-appflowconfig.html#cfn-lookoutmetrics-anomalydetector-appflowconfig-flowname"""
rp_RoleArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "RoleArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-appflowconfig.html#cfn-lookoutmetrics-anomalydetector-appflowconfig-rolearn"""
@attr.s
class AnomalyDetectorRedshiftSourceConfig(Property):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector.RedshiftSourceConfig"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html
Property Document:
- ``rp_ClusterIdentifier``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-clusteridentifier
- ``rp_DatabaseHost``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-databasehost
- ``rp_DatabaseName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-databasename
- ``rp_DatabasePort``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-databaseport
- ``rp_RoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-rolearn
- ``rp_SecretManagerArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-secretmanagerarn
- ``rp_TableName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-tablename
- ``rp_VpcConfiguration``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-vpcconfiguration
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector.RedshiftSourceConfig"
rp_ClusterIdentifier: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "ClusterIdentifier"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-clusteridentifier"""
rp_DatabaseHost: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DatabaseHost"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-databasehost"""
rp_DatabaseName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "DatabaseName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-databasename"""
rp_DatabasePort: int = attr.ib(
default=None,
validator=attr.validators.instance_of(int),
metadata={AttrMeta.PROPERTY_NAME: "DatabasePort"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-databaseport"""
rp_RoleArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "RoleArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-rolearn"""
rp_SecretManagerArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "SecretManagerArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-secretmanagerarn"""
rp_TableName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "TableName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-tablename"""
rp_VpcConfiguration: typing.Union['AnomalyDetectorVpcConfiguration', dict] = attr.ib(
default=None,
converter=AnomalyDetectorVpcConfiguration.from_dict,
validator=attr.validators.instance_of(AnomalyDetectorVpcConfiguration),
metadata={AttrMeta.PROPERTY_NAME: "VpcConfiguration"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-redshiftsourceconfig.html#cfn-lookoutmetrics-anomalydetector-redshiftsourceconfig-vpcconfiguration"""
@attr.s
class AnomalyDetectorMetric(Property):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector.Metric"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metric.html
Property Document:
- ``rp_AggregationFunction``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metric.html#cfn-lookoutmetrics-anomalydetector-metric-aggregationfunction
- ``rp_MetricName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metric.html#cfn-lookoutmetrics-anomalydetector-metric-metricname
- ``p_Namespace``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metric.html#cfn-lookoutmetrics-anomalydetector-metric-namespace
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector.Metric"
rp_AggregationFunction: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "AggregationFunction"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metric.html#cfn-lookoutmetrics-anomalydetector-metric-aggregationfunction"""
rp_MetricName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "MetricName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metric.html#cfn-lookoutmetrics-anomalydetector-metric-metricname"""
p_Namespace: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Namespace"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metric.html#cfn-lookoutmetrics-anomalydetector-metric-namespace"""
@attr.s
class AnomalyDetectorCloudwatchConfig(Property):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector.CloudwatchConfig"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-cloudwatchconfig.html
Property Document:
- ``rp_RoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-cloudwatchconfig.html#cfn-lookoutmetrics-anomalydetector-cloudwatchconfig-rolearn
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector.CloudwatchConfig"
rp_RoleArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "RoleArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-cloudwatchconfig.html#cfn-lookoutmetrics-anomalydetector-cloudwatchconfig-rolearn"""
@attr.s
class AnomalyDetectorFileFormatDescriptor(Property):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector.FileFormatDescriptor"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-fileformatdescriptor.html
Property Document:
- ``p_CsvFormatDescriptor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-fileformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-fileformatdescriptor-csvformatdescriptor
- ``p_JsonFormatDescriptor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-fileformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-fileformatdescriptor-jsonformatdescriptor
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector.FileFormatDescriptor"
p_CsvFormatDescriptor: typing.Union['AnomalyDetectorCsvFormatDescriptor', dict] = attr.ib(
default=None,
converter=AnomalyDetectorCsvFormatDescriptor.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(AnomalyDetectorCsvFormatDescriptor)),
metadata={AttrMeta.PROPERTY_NAME: "CsvFormatDescriptor"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-fileformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-fileformatdescriptor-csvformatdescriptor"""
p_JsonFormatDescriptor: typing.Union['AnomalyDetectorJsonFormatDescriptor', dict] = attr.ib(
default=None,
converter=AnomalyDetectorJsonFormatDescriptor.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(AnomalyDetectorJsonFormatDescriptor)),
metadata={AttrMeta.PROPERTY_NAME: "JsonFormatDescriptor"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-fileformatdescriptor.html#cfn-lookoutmetrics-anomalydetector-fileformatdescriptor-jsonformatdescriptor"""
@attr.s
class AnomalyDetectorS3SourceConfig(Property):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector.S3SourceConfig"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-s3sourceconfig.html
Property Document:
- ``rp_FileFormatDescriptor``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-s3sourceconfig.html#cfn-lookoutmetrics-anomalydetector-s3sourceconfig-fileformatdescriptor
- ``rp_RoleArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-s3sourceconfig.html#cfn-lookoutmetrics-anomalydetector-s3sourceconfig-rolearn
- ``p_HistoricalDataPathList``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-s3sourceconfig.html#cfn-lookoutmetrics-anomalydetector-s3sourceconfig-historicaldatapathlist
- ``p_TemplatedPathList``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-s3sourceconfig.html#cfn-lookoutmetrics-anomalydetector-s3sourceconfig-templatedpathlist
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector.S3SourceConfig"
rp_FileFormatDescriptor: typing.Union['AnomalyDetectorFileFormatDescriptor', dict] = attr.ib(
default=None,
converter=AnomalyDetectorFileFormatDescriptor.from_dict,
validator=attr.validators.instance_of(AnomalyDetectorFileFormatDescriptor),
metadata={AttrMeta.PROPERTY_NAME: "FileFormatDescriptor"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-s3sourceconfig.html#cfn-lookoutmetrics-anomalydetector-s3sourceconfig-fileformatdescriptor"""
rp_RoleArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "RoleArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-s3sourceconfig.html#cfn-lookoutmetrics-anomalydetector-s3sourceconfig-rolearn"""
p_HistoricalDataPathList: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "HistoricalDataPathList"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-s3sourceconfig.html#cfn-lookoutmetrics-anomalydetector-s3sourceconfig-historicaldatapathlist"""
p_TemplatedPathList: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "TemplatedPathList"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-s3sourceconfig.html#cfn-lookoutmetrics-anomalydetector-s3sourceconfig-templatedpathlist"""
@attr.s
class AnomalyDetectorMetricSource(Property):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector.MetricSource"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricsource.html
Property Document:
- ``p_AppFlowConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricsource.html#cfn-lookoutmetrics-anomalydetector-metricsource-appflowconfig
- ``p_CloudwatchConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricsource.html#cfn-lookoutmetrics-anomalydetector-metricsource-cloudwatchconfig
- ``p_RDSSourceConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricsource.html#cfn-lookoutmetrics-anomalydetector-metricsource-rdssourceconfig
- ``p_RedshiftSourceConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricsource.html#cfn-lookoutmetrics-anomalydetector-metricsource-redshiftsourceconfig
- ``p_S3SourceConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricsource.html#cfn-lookoutmetrics-anomalydetector-metricsource-s3sourceconfig
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector.MetricSource"
p_AppFlowConfig: typing.Union['AnomalyDetectorAppFlowConfig', dict] = attr.ib(
default=None,
converter=AnomalyDetectorAppFlowConfig.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(AnomalyDetectorAppFlowConfig)),
metadata={AttrMeta.PROPERTY_NAME: "AppFlowConfig"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricsource.html#cfn-lookoutmetrics-anomalydetector-metricsource-appflowconfig"""
p_CloudwatchConfig: typing.Union['AnomalyDetectorCloudwatchConfig', dict] = attr.ib(
default=None,
converter=AnomalyDetectorCloudwatchConfig.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(AnomalyDetectorCloudwatchConfig)),
metadata={AttrMeta.PROPERTY_NAME: "CloudwatchConfig"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricsource.html#cfn-lookoutmetrics-anomalydetector-metricsource-cloudwatchconfig"""
p_RDSSourceConfig: typing.Union['AnomalyDetectorRDSSourceConfig', dict] = attr.ib(
default=None,
converter=AnomalyDetectorRDSSourceConfig.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(AnomalyDetectorRDSSourceConfig)),
metadata={AttrMeta.PROPERTY_NAME: "RDSSourceConfig"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricsource.html#cfn-lookoutmetrics-anomalydetector-metricsource-rdssourceconfig"""
p_RedshiftSourceConfig: typing.Union['AnomalyDetectorRedshiftSourceConfig', dict] = attr.ib(
default=None,
converter=AnomalyDetectorRedshiftSourceConfig.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(AnomalyDetectorRedshiftSourceConfig)),
metadata={AttrMeta.PROPERTY_NAME: "RedshiftSourceConfig"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricsource.html#cfn-lookoutmetrics-anomalydetector-metricsource-redshiftsourceconfig"""
p_S3SourceConfig: typing.Union['AnomalyDetectorS3SourceConfig', dict] = attr.ib(
default=None,
converter=AnomalyDetectorS3SourceConfig.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(AnomalyDetectorS3SourceConfig)),
metadata={AttrMeta.PROPERTY_NAME: "S3SourceConfig"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricsource.html#cfn-lookoutmetrics-anomalydetector-metricsource-s3sourceconfig"""
@attr.s
class AnomalyDetectorMetricSet(Property):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector.MetricSet"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html
Property Document:
- ``rp_MetricList``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-metriclist
- ``rp_MetricSetName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-metricsetname
- ``rp_MetricSource``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-metricsource
- ``p_DimensionList``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-dimensionlist
- ``p_MetricSetDescription``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-metricsetdescription
- ``p_MetricSetFrequency``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-metricsetfrequency
- ``p_Offset``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-offset
- ``p_TimestampColumn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-timestampcolumn
- ``p_Timezone``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-timezone
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector.MetricSet"
rp_MetricList: typing.List[typing.Union['AnomalyDetectorMetric', dict]] = attr.ib(
default=None,
converter=AnomalyDetectorMetric.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(AnomalyDetectorMetric), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "MetricList"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-metriclist"""
rp_MetricSetName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "MetricSetName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-metricsetname"""
rp_MetricSource: typing.Union['AnomalyDetectorMetricSource', dict] = attr.ib(
default=None,
converter=AnomalyDetectorMetricSource.from_dict,
validator=attr.validators.instance_of(AnomalyDetectorMetricSource),
metadata={AttrMeta.PROPERTY_NAME: "MetricSource"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-metricsource"""
p_DimensionList: typing.List[TypeHint.intrinsic_str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "DimensionList"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-dimensionlist"""
p_MetricSetDescription: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "MetricSetDescription"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-metricsetdescription"""
p_MetricSetFrequency: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "MetricSetFrequency"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-metricsetfrequency"""
p_Offset: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
metadata={AttrMeta.PROPERTY_NAME: "Offset"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-offset"""
p_TimestampColumn: typing.Union['AnomalyDetectorTimestampColumn', dict] = attr.ib(
default=None,
converter=AnomalyDetectorTimestampColumn.from_dict,
validator=attr.validators.optional(attr.validators.instance_of(AnomalyDetectorTimestampColumn)),
metadata={AttrMeta.PROPERTY_NAME: "TimestampColumn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-timestampcolumn"""
p_Timezone: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Timezone"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lookoutmetrics-anomalydetector-metricset.html#cfn-lookoutmetrics-anomalydetector-metricset-timezone"""
#--- Resource declaration ---
@attr.s
class Alert(Resource):
"""
AWS Object Type = "AWS::LookoutMetrics::Alert"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-alert.html
Property Document:
- ``rp_Action``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-alert.html#cfn-lookoutmetrics-alert-action
- ``rp_AlertSensitivityThreshold``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-alert.html#cfn-lookoutmetrics-alert-alertsensitivitythreshold
- ``rp_AnomalyDetectorArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-alert.html#cfn-lookoutmetrics-alert-anomalydetectorarn
- ``p_AlertDescription``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-alert.html#cfn-lookoutmetrics-alert-alertdescription
- ``p_AlertName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-alert.html#cfn-lookoutmetrics-alert-alertname
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::Alert"
rp_Action: dict = attr.ib(
default=None,
validator=attr.validators.instance_of(dict),
metadata={AttrMeta.PROPERTY_NAME: "Action"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-alert.html#cfn-lookoutmetrics-alert-action"""
rp_AlertSensitivityThreshold: int = attr.ib(
default=None,
validator=attr.validators.instance_of(int),
metadata={AttrMeta.PROPERTY_NAME: "AlertSensitivityThreshold"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-alert.html#cfn-lookoutmetrics-alert-alertsensitivitythreshold"""
rp_AnomalyDetectorArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "AnomalyDetectorArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-alert.html#cfn-lookoutmetrics-alert-anomalydetectorarn"""
p_AlertDescription: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "AlertDescription"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-alert.html#cfn-lookoutmetrics-alert-alertdescription"""
p_AlertName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "AlertName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-alert.html#cfn-lookoutmetrics-alert-alertname"""
@property
def rv_Arn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-alert.html#aws-resource-lookoutmetrics-alert-return-values"""
return GetAtt(resource=self, attr_name="Arn")
@attr.s
class AnomalyDetector(Resource):
"""
AWS Object Type = "AWS::LookoutMetrics::AnomalyDetector"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-anomalydetector.html
Property Document:
- ``rp_AnomalyDetectorConfig``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-anomalydetector.html#cfn-lookoutmetrics-anomalydetector-anomalydetectorconfig
- ``rp_MetricSetList``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-anomalydetector.html#cfn-lookoutmetrics-anomalydetector-metricsetlist
- ``p_AnomalyDetectorDescription``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-anomalydetector.html#cfn-lookoutmetrics-anomalydetector-anomalydetectordescription
- ``p_AnomalyDetectorName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-anomalydetector.html#cfn-lookoutmetrics-anomalydetector-anomalydetectorname
- ``p_KmsKeyArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-anomalydetector.html#cfn-lookoutmetrics-anomalydetector-kmskeyarn
"""
AWS_OBJECT_TYPE = "AWS::LookoutMetrics::AnomalyDetector"
rp_AnomalyDetectorConfig: dict = attr.ib(
default=None,
validator=attr.validators.instance_of(dict),
metadata={AttrMeta.PROPERTY_NAME: "AnomalyDetectorConfig"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-anomalydetector.html#cfn-lookoutmetrics-anomalydetector-anomalydetectorconfig"""
rp_MetricSetList: typing.List[typing.Union['AnomalyDetectorMetricSet', dict]] = attr.ib(
default=None,
converter=AnomalyDetectorMetricSet.from_list,
validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(AnomalyDetectorMetricSet), iterable_validator=attr.validators.instance_of(list)),
metadata={AttrMeta.PROPERTY_NAME: "MetricSetList"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-anomalydetector.html#cfn-lookoutmetrics-anomalydetector-metricsetlist"""
p_AnomalyDetectorDescription: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "AnomalyDetectorDescription"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-anomalydetector.html#cfn-lookoutmetrics-anomalydetector-anomalydetectordescription"""
p_AnomalyDetectorName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "AnomalyDetectorName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-anomalydetector.html#cfn-lookoutmetrics-anomalydetector-anomalydetectorname"""
p_KmsKeyArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "KmsKeyArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-anomalydetector.html#cfn-lookoutmetrics-anomalydetector-kmskeyarn"""
@property
def rv_Arn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-lookoutmetrics-anomalydetector.html#aws-resource-lookoutmetrics-anomalydetector-return-values"""
return GetAtt(resource=self, attr_name="Arn")
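A minimal usage sketch for the ``Alert`` declaration above. The positional logical-id argument, the layout of the ``rp_Action`` dict, and the placeholder ARNs are assumptions and not part of this excerpt:

alert = Alert(
    "DemoAlert",  # logical id; assumed to be the first positional argument of Resource
    rp_Action={
        "SNSConfiguration": {
            "RoleArn": "arn:aws:iam::111122223333:role/lookout-sns-publish",
            "SnsTopicArn": "arn:aws:sns:us-east-1:111122223333:lookout-alerts",
        }
    },
    rp_AlertSensitivityThreshold=70,
    rp_AnomalyDetectorArn="arn:aws:lookoutmetrics:us-east-1:111122223333:AnomalyDetector:demo",
    p_AlertName="demo-alert",
)
arn_ref = alert.rv_Arn  # GetAtt reference usable from other resources in the same template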
| 72.952312
| 244
| 0.792247
| 4,905
| 50,483
| 8.059123
| 0.027727
| 0.205414
| 0.040349
| 0.062358
| 0.909006
| 0.909006
| 0.895447
| 0.851278
| 0.851202
| 0.848039
| 0
| 0.000632
| 0.090466
| 50,483
| 691
| 245
| 73.057887
| 0.860288
| 0.346453
| 0
| 0.457801
| 0
| 0
| 0.103117
| 0.063617
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005115
| false
| 0
| 0.01023
| 0
| 0.26087
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6513f213b7ae02509adbae8bebb7d8031a8a3323
| 110
|
py
|
Python
|
pond5/blueprints/webui/views.py
|
IvanFrezzaJr/pond5
|
02cc39262b33eac59727ee416645f7006a99b099
|
[
"MIT"
] | null | null | null |
pond5/blueprints/webui/views.py
|
IvanFrezzaJr/pond5
|
02cc39262b33eac59727ee416645f7006a99b099
|
[
"MIT"
] | null | null | null |
pond5/blueprints/webui/views.py
|
IvanFrezzaJr/pond5
|
02cc39262b33eac59727ee416645f7006a99b099
|
[
"MIT"
] | null | null | null |
from flask import render_template
def index():
return render_template("index.html", title='pond5 test')
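A hypothetical wiring sketch for the view above; the blueprint name, URL rule, and template folder are assumptions based on the file living under a webui blueprint package:

from flask import Blueprint, render_template

webui = Blueprint("webui", __name__, template_folder="templates")

@webui.route("/")
def index():
    # Renders templates/index.html with a page title in the template context.
    return render_template("index.html", title='pond5 test')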
| 18.333333
| 60
| 0.745455
| 15
| 110
| 5.333333
| 0.8
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010638
| 0.145455
| 110
| 5
| 61
| 22
| 0.840426
| 0
| 0
| 0
| 0
| 0
| 0.183486
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
651ec842e96c1b1f0d0e9ffe6067c0199b8a1424
| 124
|
py
|
Python
|
pyvat/utils.py
|
ponyville/pyvat
|
75fb781d3c00cf323544eb929a96344c1978e2c8
|
[
"Apache-2.0"
] | 48
|
2015-07-22T12:02:20.000Z
|
2022-02-07T16:54:13.000Z
|
pyvat/utils.py
|
ponyville/pyvat
|
75fb781d3c00cf323544eb929a96344c1978e2c8
|
[
"Apache-2.0"
] | 34
|
2015-03-27T17:47:38.000Z
|
2022-02-08T18:14:55.000Z
|
pyvat/utils.py
|
ponyville/pyvat
|
75fb781d3c00cf323544eb929a96344c1978e2c8
|
[
"Apache-2.0"
] | 40
|
2015-04-08T14:03:06.000Z
|
2022-02-09T12:29:04.000Z
|
from decimal import Decimal
def ensure_decimal(value):
return value if isinstance(value, Decimal) else Decimal(value)
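A short usage sketch for ensure_decimal (the import path simply mirrors the file location, pyvat/utils.py):

from decimal import Decimal
from pyvat.utils import ensure_decimal

assert ensure_decimal(Decimal("1.10")) == Decimal("1.10")  # Decimal passes through unchanged
assert ensure_decimal("19.99") == Decimal("19.99")         # str is converted via Decimal()
assert ensure_decimal(5) == Decimal(5)                     # int likewise

Passing a float hands its binary representation to Decimal, so exact values should be supplied as str.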
| 20.666667
| 66
| 0.782258
| 17
| 124
| 5.647059
| 0.588235
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153226
| 124
| 5
| 67
| 24.8
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
6534325409884c8b43e265c56070a9cc57567e0b
| 42
|
py
|
Python
|
examples/phobos/tests/test_std_system.py
|
kinke/autowrap
|
2f042df3f292aa39b1da0b9607fbe3424f56ff4a
|
[
"BSD-3-Clause"
] | 47
|
2019-07-16T10:38:07.000Z
|
2022-03-30T16:34:24.000Z
|
examples/phobos/tests/test_std_system.py
|
kinke/autowrap
|
2f042df3f292aa39b1da0b9607fbe3424f56ff4a
|
[
"BSD-3-Clause"
] | 199
|
2019-06-17T23:24:40.000Z
|
2021-06-16T16:41:36.000Z
|
examples/phobos/tests/test_std_system.py
|
kinke/autowrap
|
2f042df3f292aa39b1da0b9607fbe3424f56ff4a
|
[
"BSD-3-Clause"
] | 7
|
2019-09-13T18:03:49.000Z
|
2022-01-17T03:53:00.000Z
|
def test_import():
import std_system
| 10.5
| 21
| 0.714286
| 6
| 42
| 4.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 42
| 3
| 22
| 14
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 1
| 0
| 1.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e8e88d99dce51ba1201b6f79edf2263f1faa8d1a
| 125
|
py
|
Python
|
microtbs_rl/algorithms/common/__init__.py
|
alex-petrenko/simple-reinforcement-learning
|
d0da1d9026d1f05e2552d08e56fbe58ad869fafd
|
[
"MIT"
] | 8
|
2018-03-05T05:13:39.000Z
|
2021-02-27T03:12:05.000Z
|
microtbs_rl/algorithms/common/__init__.py
|
alex-petrenko/simple-reinforcement-learning
|
d0da1d9026d1f05e2552d08e56fbe58ad869fafd
|
[
"MIT"
] | null | null | null |
microtbs_rl/algorithms/common/__init__.py
|
alex-petrenko/simple-reinforcement-learning
|
d0da1d9026d1f05e2552d08e56fbe58ad869fafd
|
[
"MIT"
] | 4
|
2018-09-04T04:44:26.000Z
|
2021-07-22T06:34:51.000Z
|
from microtbs_rl.algorithms.common.agent import AgentLearner
from microtbs_rl.algorithms.common.loops import run_policy_loop
| 41.666667
| 63
| 0.888
| 18
| 125
| 5.944444
| 0.666667
| 0.224299
| 0.261682
| 0.448598
| 0.560748
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064
| 125
| 2
| 64
| 62.5
| 0.91453
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e8f4043d5536bdca2c37406c6cd15241be633a78
| 21,362
|
py
|
Python
|
tests/test_managedblockchain/test_managedblockchain_proposalvotes.py
|
junelife/moto
|
e61d794cbc9c18b06c11014da666e25f3fce637b
|
[
"Apache-2.0"
] | 1
|
2021-12-12T04:23:06.000Z
|
2021-12-12T04:23:06.000Z
|
tests/test_managedblockchain/test_managedblockchain_proposalvotes.py
|
junelife/moto
|
e61d794cbc9c18b06c11014da666e25f3fce637b
|
[
"Apache-2.0"
] | 2
|
2018-08-07T10:47:18.000Z
|
2018-08-08T15:13:04.000Z
|
tests/test_managedblockchain/test_managedblockchain_proposalvotes.py
|
junelife/moto
|
e61d794cbc9c18b06c11014da666e25f3fce637b
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import unicode_literals
import os
import boto3
import sure # noqa
from freezegun import freeze_time
from unittest import SkipTest
from moto import mock_managedblockchain, settings
from . import helpers
@mock_managedblockchain
def test_vote_on_proposal_one_member_total_yes():
conn = boto3.client("managedblockchain", region_name="us-east-1")
# Create network
response = conn.create_network(
Name="testnetwork1",
Framework="HYPERLEDGER_FABRIC",
FrameworkVersion="1.2",
FrameworkConfiguration=helpers.default_frameworkconfiguration,
VotingPolicy=helpers.default_votingpolicy,
MemberConfiguration=helpers.default_memberconfiguration,
)
network_id = response["NetworkId"]
member_id = response["MemberId"]
# Create proposal
response = conn.create_proposal(
NetworkId=network_id,
MemberId=member_id,
Actions=helpers.default_policy_actions,
)
proposal_id = response["ProposalId"]
# Get proposal details
response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
response["Proposal"]["NetworkId"].should.equal(network_id)
response["Proposal"]["Status"].should.equal("IN_PROGRESS")
# Vote yes
response = conn.vote_on_proposal(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id,
Vote="YES",
)
# List proposal votes
response = conn.list_proposal_votes(NetworkId=network_id, ProposalId=proposal_id)
response["ProposalVotes"][0]["MemberId"].should.equal(member_id)
# Get proposal details - should be APPROVED
response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
response["Proposal"]["Status"].should.equal("APPROVED")
response["Proposal"]["YesVoteCount"].should.equal(1)
response["Proposal"]["NoVoteCount"].should.equal(0)
response["Proposal"]["OutstandingVoteCount"].should.equal(0)
@mock_managedblockchain
def test_vote_on_proposal_one_member_total_no():
conn = boto3.client("managedblockchain", region_name="us-east-1")
# Create network
response = conn.create_network(
Name="testnetwork1",
Framework="HYPERLEDGER_FABRIC",
FrameworkVersion="1.2",
FrameworkConfiguration=helpers.default_frameworkconfiguration,
VotingPolicy=helpers.default_votingpolicy,
MemberConfiguration=helpers.default_memberconfiguration,
)
network_id = response["NetworkId"]
member_id = response["MemberId"]
# Create proposal
response = conn.create_proposal(
NetworkId=network_id,
MemberId=member_id,
Actions=helpers.default_policy_actions,
)
proposal_id = response["ProposalId"]
# Get proposal details
response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
response["Proposal"]["NetworkId"].should.equal(network_id)
response["Proposal"]["Status"].should.equal("IN_PROGRESS")
# Vote no
response = conn.vote_on_proposal(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id,
Vote="NO",
)
# List proposal votes
response = conn.list_proposal_votes(NetworkId=network_id, ProposalId=proposal_id)
response["ProposalVotes"][0]["MemberId"].should.equal(member_id)
# Get proposal details - should be REJECTED
response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
response["Proposal"]["Status"].should.equal("REJECTED")
response["Proposal"]["YesVoteCount"].should.equal(0)
response["Proposal"]["NoVoteCount"].should.equal(1)
response["Proposal"]["OutstandingVoteCount"].should.equal(0)
@mock_managedblockchain
def test_vote_on_proposal_yes_greater_than():
conn = boto3.client("managedblockchain", region_name="us-east-1")
votingpolicy = {
"ApprovalThresholdPolicy": {
"ThresholdPercentage": 50,
"ProposalDurationInHours": 24,
"ThresholdComparator": "GREATER_THAN",
}
}
# Create network
response = conn.create_network(
Name="testnetwork1",
Framework="HYPERLEDGER_FABRIC",
FrameworkVersion="1.2",
FrameworkConfiguration=helpers.default_frameworkconfiguration,
VotingPolicy=votingpolicy,
MemberConfiguration=helpers.default_memberconfiguration,
)
network_id = response["NetworkId"]
member_id = response["MemberId"]
response = conn.create_proposal(
NetworkId=network_id,
MemberId=member_id,
Actions=helpers.default_policy_actions,
)
proposal_id = response["ProposalId"]
# Vote yes
response = conn.vote_on_proposal(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id,
Vote="YES",
)
# Get the invitation
response = conn.list_invitations()
invitation_id = response["Invitations"][0]["InvitationId"]
# Create the member
response = conn.create_member(
InvitationId=invitation_id,
NetworkId=network_id,
MemberConfiguration=helpers.create_member_configuration(
"testmember2", "admin", "Admin12345", False, "Test Member 2"
),
)
member_id2 = response["MemberId"]
# Create another proposal
response = conn.create_proposal(
NetworkId=network_id,
MemberId=member_id,
Actions=helpers.default_policy_actions,
)
proposal_id = response["ProposalId"]
# Vote yes with member 1
response = conn.vote_on_proposal(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id,
Vote="YES",
)
# Get proposal details
response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
response["Proposal"]["NetworkId"].should.equal(network_id)
response["Proposal"]["Status"].should.equal("IN_PROGRESS")
# Vote no with member 2
response = conn.vote_on_proposal(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id2,
Vote="NO",
)
# Get proposal details
response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
response["Proposal"]["Status"].should.equal("REJECTED")
@mock_managedblockchain
def test_vote_on_proposal_no_greater_than():
conn = boto3.client("managedblockchain", region_name="us-east-1")
votingpolicy = {
"ApprovalThresholdPolicy": {
"ThresholdPercentage": 50,
"ProposalDurationInHours": 24,
"ThresholdComparator": "GREATER_THAN",
}
}
# Create network
response = conn.create_network(
Name="testnetwork1",
Framework="HYPERLEDGER_FABRIC",
FrameworkVersion="1.2",
FrameworkConfiguration=helpers.default_frameworkconfiguration,
VotingPolicy=votingpolicy,
MemberConfiguration=helpers.default_memberconfiguration,
)
network_id = response["NetworkId"]
member_id = response["MemberId"]
response = conn.create_proposal(
NetworkId=network_id,
MemberId=member_id,
Actions=helpers.default_policy_actions,
)
proposal_id = response["ProposalId"]
# Vote yes
response = conn.vote_on_proposal(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id,
Vote="YES",
)
# Get the invitation
response = conn.list_invitations()
invitation_id = response["Invitations"][0]["InvitationId"]
# Create the member
response = conn.create_member(
InvitationId=invitation_id,
NetworkId=network_id,
MemberConfiguration=helpers.create_member_configuration(
"testmember2", "admin", "Admin12345", False, "Test Member 2"
),
)
member_id2 = response["MemberId"]
# Create another proposal
response = conn.create_proposal(
NetworkId=network_id,
MemberId=member_id,
Actions=helpers.default_policy_actions,
)
proposal_id = response["ProposalId"]
# Vote no with member 1
response = conn.vote_on_proposal(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id,
Vote="NO",
)
# Vote no with member 2
response = conn.vote_on_proposal(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id2,
Vote="NO",
)
# Get proposal details
response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
response["Proposal"]["NetworkId"].should.equal(network_id)
response["Proposal"]["Status"].should.equal("REJECTED")
@mock_managedblockchain
def test_vote_on_proposal_expiredproposal():
if os.environ.get("TEST_SERVER_MODE", "false").lower() == "true":
raise SkipTest("Cant manipulate time in server mode")
votingpolicy = {
"ApprovalThresholdPolicy": {
"ThresholdPercentage": 50,
"ProposalDurationInHours": 1,
"ThresholdComparator": "GREATER_THAN_OR_EQUAL_TO",
}
}
conn = boto3.client("managedblockchain", region_name="us-east-1")
with freeze_time("2015-01-01 12:00:00"):
# Create network - need a good network
response = conn.create_network(
Name="testnetwork1",
Framework="HYPERLEDGER_FABRIC",
FrameworkVersion="1.2",
FrameworkConfiguration=helpers.default_frameworkconfiguration,
VotingPolicy=votingpolicy,
MemberConfiguration=helpers.default_memberconfiguration,
)
network_id = response["NetworkId"]
member_id = response["MemberId"]
response = conn.create_proposal(
NetworkId=network_id,
MemberId=member_id,
Actions=helpers.default_policy_actions,
)
proposal_id = response["ProposalId"]
with freeze_time("2015-02-01 12:00:00"):
# Vote yes - should set status to expired
response = conn.vote_on_proposal.when.called_with(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id,
Vote="YES",
).should.throw(
Exception,
"Proposal {0} is expired and you cannot vote on it.".format(proposal_id),
)
# Get proposal details - should be EXPIRED
response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
response["Proposal"]["Status"].should.equal("EXPIRED")
@mock_managedblockchain
def test_vote_on_proposal_status_check():
conn = boto3.client("managedblockchain", region_name="us-east-1")
# Create network
response = conn.create_network(
Name="testnetwork1",
Framework="HYPERLEDGER_FABRIC",
FrameworkVersion="1.2",
FrameworkConfiguration=helpers.default_frameworkconfiguration,
VotingPolicy=helpers.default_votingpolicy,
MemberConfiguration=helpers.default_memberconfiguration,
)
network_id = response["NetworkId"]
member_id = response["MemberId"]
# Create 2 more members: approve two invite proposals, then accept the resulting invitations below
for counter in range(2, 4):
response = conn.create_proposal(
NetworkId=network_id,
MemberId=member_id,
Actions=helpers.default_policy_actions,
)
proposal_id = response["ProposalId"]
# Vote yes
response = conn.vote_on_proposal(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id,
Vote="YES",
)
memberidlist = [None, None, None]
memberidlist[0] = member_id
for counter in range(2, 4):
# Get the invitation
response = conn.list_invitations()
invitation_id = helpers.select_invitation_id_for_network(
response["Invitations"], network_id, "PENDING"
)[0]
# Create the member
response = conn.create_member(
InvitationId=invitation_id,
NetworkId=network_id,
MemberConfiguration=helpers.create_member_configuration(
"testmember" + str(counter),
"admin",
"Admin12345",
False,
"Test Member " + str(counter),
),
)
member_id = response["MemberId"]
memberidlist[counter - 1] = member_id
# Should be no more pending invitations
response = conn.list_invitations()
pendinginvs = helpers.select_invitation_id_for_network(
response["Invitations"], network_id, "PENDING"
)
pendinginvs.should.have.length_of(0)
# Create another proposal
response = conn.create_proposal(
NetworkId=network_id,
MemberId=member_id,
Actions=helpers.default_policy_actions,
)
proposal_id = response["ProposalId"]
# Vote yes with member 1
response = conn.vote_on_proposal(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=memberidlist[0],
Vote="YES",
)
# Vote yes with member 2
response = conn.vote_on_proposal(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=memberidlist[1],
Vote="YES",
)
# Get proposal details - now approved (2 yes, 1 outstanding)
response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
response["Proposal"]["NetworkId"].should.equal(network_id)
response["Proposal"]["Status"].should.equal("APPROVED")
# Should be one pending invitation
response = conn.list_invitations()
pendinginvs = helpers.select_invitation_id_for_network(
response["Invitations"], network_id, "PENDING"
)
pendinginvs.should.have.length_of(1)
# Vote with member 3 - should throw an exception and not create a new invitation
response = conn.vote_on_proposal.when.called_with(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=memberidlist[2],
Vote="YES",
).should.throw(Exception, "and you cannot vote on it")
# Should still be one pending invitation
response = conn.list_invitations()
pendinginvs = helpers.select_invitation_id_for_network(
response["Invitations"], network_id, "PENDING"
)
pendinginvs.should.have.length_of(1)
@mock_managedblockchain
def test_vote_on_proposal_badnetwork():
conn = boto3.client("managedblockchain", region_name="us-east-1")
response = conn.vote_on_proposal.when.called_with(
NetworkId="n-ABCDEFGHIJKLMNOP0123456789",
ProposalId="p-ABCDEFGHIJKLMNOP0123456789",
VoterMemberId="m-ABCDEFGHIJKLMNOP0123456789",
Vote="YES",
).should.throw(Exception, "Network n-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_vote_on_proposal_badproposal():
conn = boto3.client("managedblockchain", region_name="us-east-1")
# Create network - need a good network
response = conn.create_network(
Name="testnetwork1",
Framework="HYPERLEDGER_FABRIC",
FrameworkVersion="1.2",
FrameworkConfiguration=helpers.default_frameworkconfiguration,
VotingPolicy=helpers.default_votingpolicy,
MemberConfiguration=helpers.default_memberconfiguration,
)
network_id = response["NetworkId"]
response = conn.vote_on_proposal.when.called_with(
NetworkId=network_id,
ProposalId="p-ABCDEFGHIJKLMNOP0123456789",
VoterMemberId="m-ABCDEFGHIJKLMNOP0123456789",
Vote="YES",
).should.throw(Exception, "Proposal p-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_vote_on_proposal_badmember():
conn = boto3.client("managedblockchain", region_name="us-east-1")
# Create network - need a good network
response = conn.create_network(
Name="testnetwork1",
Framework="HYPERLEDGER_FABRIC",
FrameworkVersion="1.2",
FrameworkConfiguration=helpers.default_frameworkconfiguration,
VotingPolicy=helpers.default_votingpolicy,
MemberConfiguration=helpers.default_memberconfiguration,
)
network_id = response["NetworkId"]
member_id = response["MemberId"]
response = conn.create_proposal(
NetworkId=network_id,
MemberId=member_id,
Actions=helpers.default_policy_actions,
)
proposal_id = response["ProposalId"]
response = conn.vote_on_proposal.when.called_with(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId="m-ABCDEFGHIJKLMNOP0123456789",
Vote="YES",
).should.throw(Exception, "Member m-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_vote_on_proposal_badvote():
conn = boto3.client("managedblockchain", region_name="us-east-1")
# Create network - need a good network
response = conn.create_network(
Name="testnetwork1",
Framework="HYPERLEDGER_FABRIC",
FrameworkVersion="1.2",
FrameworkConfiguration=helpers.default_frameworkconfiguration,
VotingPolicy=helpers.default_votingpolicy,
MemberConfiguration=helpers.default_memberconfiguration,
)
network_id = response["NetworkId"]
member_id = response["MemberId"]
response = conn.create_proposal(
NetworkId=network_id,
MemberId=member_id,
Actions=helpers.default_policy_actions,
)
proposal_id = response["ProposalId"]
response = conn.vote_on_proposal.when.called_with(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id,
Vote="FOO",
).should.throw(Exception, "Invalid request body")
@mock_managedblockchain
def test_vote_on_proposal_alreadyvoted():
conn = boto3.client("managedblockchain", region_name="us-east-1")
votingpolicy = {
"ApprovalThresholdPolicy": {
"ThresholdPercentage": 50,
"ProposalDurationInHours": 24,
"ThresholdComparator": "GREATER_THAN",
}
}
# Create network - need a good network
response = conn.create_network(
Name="testnetwork1",
Framework="HYPERLEDGER_FABRIC",
FrameworkVersion="1.2",
FrameworkConfiguration=helpers.default_frameworkconfiguration,
VotingPolicy=votingpolicy,
MemberConfiguration=helpers.default_memberconfiguration,
)
network_id = response["NetworkId"]
member_id = response["MemberId"]
response = conn.create_proposal(
NetworkId=network_id,
MemberId=member_id,
Actions=helpers.default_policy_actions,
)
proposal_id = response["ProposalId"]
# Vote yes
response = conn.vote_on_proposal(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id,
Vote="YES",
)
# Get the invitation
response = conn.list_invitations()
invitation_id = response["Invitations"][0]["InvitationId"]
# Create the member
response = conn.create_member(
InvitationId=invitation_id,
NetworkId=network_id,
MemberConfiguration=helpers.create_member_configuration(
"testmember2", "admin", "Admin12345", False, "Test Member 2"
),
)
# Create another proposal
response = conn.create_proposal(
NetworkId=network_id,
MemberId=member_id,
Actions=helpers.default_policy_actions,
)
proposal_id = response["ProposalId"]
# Get proposal details
response = conn.get_proposal(NetworkId=network_id, ProposalId=proposal_id)
response["Proposal"]["NetworkId"].should.equal(network_id)
response["Proposal"]["Status"].should.equal("IN_PROGRESS")
# Vote yes with member 1
response = conn.vote_on_proposal(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id,
Vote="YES",
)
# Vote yes with member 1 again
response = conn.vote_on_proposal.when.called_with(
NetworkId=network_id,
ProposalId=proposal_id,
VoterMemberId=member_id,
Vote="YES",
).should.throw(
Exception,
"Member {0} has already voted on proposal {1}.".format(member_id, proposal_id),
)
@mock_managedblockchain
def test_list_proposal_votes_badnetwork():
conn = boto3.client("managedblockchain", region_name="us-east-1")
response = conn.list_proposal_votes.when.called_with(
NetworkId="n-ABCDEFGHIJKLMNOP0123456789",
ProposalId="p-ABCDEFGHIJKLMNOP0123456789",
).should.throw(Exception, "Network n-ABCDEFGHIJKLMNOP0123456789 not found")
@mock_managedblockchain
def test_list_proposal_votes_badproposal():
conn = boto3.client("managedblockchain", region_name="us-east-1")
# Create network
response = conn.create_network(
Name="testnetwork1",
Framework="HYPERLEDGER_FABRIC",
FrameworkVersion="1.2",
FrameworkConfiguration=helpers.default_frameworkconfiguration,
VotingPolicy=helpers.default_votingpolicy,
MemberConfiguration=helpers.default_memberconfiguration,
)
network_id = response["NetworkId"]
member_id = response["MemberId"]
response = conn.list_proposal_votes.when.called_with(
NetworkId=network_id, ProposalId="p-ABCDEFGHIJKLMNOP0123456789",
).should.throw(Exception, "Proposal p-ABCDEFGHIJKLMNOP0123456789 not found")
| 31.836066
| 87
| 0.679805
| 2,130
| 21,362
| 6.595305
| 0.078404
| 0.044846
| 0.062785
| 0.066629
| 0.936503
| 0.913724
| 0.902548
| 0.888027
| 0.878203
| 0.852719
| 0
| 0.01877
| 0.221889
| 21,362
| 670
| 88
| 31.883582
| 0.826375
| 0.06535
| 0
| 0.786
| 0
| 0
| 0.151253
| 0.03014
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026
| false
| 0
| 0.016
| 0
| 0.042
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
68584b0872eb1fd43baeac7af1a1487c2977e004
| 21,365
|
py
|
Python
|
session.py
|
cheng6076/virnng
|
d790c02833865c43cb8afb2552a75c9445365f24
|
[
"Apache-2.0"
] | 13
|
2017-05-18T22:44:22.000Z
|
2020-09-16T14:19:49.000Z
|
session.py
|
cheng6076/virnng
|
d790c02833865c43cb8afb2552a75c9445365f24
|
[
"Apache-2.0"
] | 1
|
2018-07-02T12:08:15.000Z
|
2018-07-02T12:08:15.000Z
|
session.py
|
cheng6076/virnng
|
d790c02833865c43cb8afb2552a75c9445365f24
|
[
"Apache-2.0"
] | 3
|
2017-11-08T11:51:17.000Z
|
2019-11-03T23:18:50.000Z
|
from encoder import Encoder
from decoder import Decoder
from parser import Parser
from baseline import *
from language_model import LanguageModel
from util import Reader
import dynet as dy
from misc import compute_eval_score, compute_perplexity
import os
initializers = {'glorot': dy.GlorotInitializer(),
'constant': dy.ConstInitializer(0.01),
'uniform': dy.UniformInitializer(0.1),
'normal': dy.NormalInitializer(mean = 0, var = 1)
}
optimizers = {
"sgd": dy.SimpleSGDTrainer,
"adam": dy.AdamTrainer,
"adadelta": dy.AdadeltaTrainer,
"adagrad": dy.AdagradTrainer
}
class Session(object):
def __init__(self, options):
self.reader = Reader(options.data_dir, options.data_augment)
self.options = options
def supervised_enc(self):
encoder = self.create_encoder()
if os.path.exists(self.options.result_dir + 'model_enc'):
self.load_encoder(encoder)
enc_trainer = optimizers[self.options.optimizer](encoder.model)
lr = self.options.lr #used only for sgd
i = 0
best_f1 = 0
print ('supervised training for encoder...')
for epoch in range(self.options.epochs):
sents = 0
total_loss = 0.0
train = self.reader.next_example(0)
train_size = len(self.reader.data[0])
for data in train:
s1, s2, s3, pos, act = data[0], data[1], data[2], data[3], data[4]
loss = encoder.train(s1, s2, s3, pos, act, self.options.enc_dropout)
sents += 1
if loss is not None:
total_loss += loss.scalar_value()
loss.backward()
if self.options.optimizer == 'sgd':
enc_trainer.update(lr)
else:
enc_trainer.update()
e = float(i) / train_size
if i % self.options.print_every == 0:
print('epoch {}: loss per sentence: {}'.format(e, total_loss / sents))
sents = 0
total_loss = 0.0
if i!=0 and i % self.options.save_every == 0:
print('computing loss on validation set...')
valid = self.reader.next_example(2) #fix this
valid_size = len(self.reader.data[2])
rf = open(self.options.result_dir+'result', 'w')
for vdata in valid:
s1, s2, s3, pos, act = vdata[0], vdata[1], vdata[2], vdata[3], vdata[4]
_, output, _ = encoder.parse(s1, s2, s3, pos)
rf.write(output + '\n')
rf.close()
f1 = compute_eval_score(self.options.result_dir)
if f1 > best_f1:
best_f1 = f1
print ('highest f1: {}'.format(f1))
print ('saving model...')
encoder.Save(self.options.result_dir + 'model_enc')
else:
lr = lr * self.options.decay
i += 1
def supervised_dec(self):
decoder = self.create_decoder()
if os.path.exists(self.options.result_dir + 'model_dec'):
self.load_decoder(decoder)
dec_trainer = optimizers[self.options.optimizer](decoder.model)
lr = self.options.lr #used only for sgd
i = 0
lowest_valid_loss = 9999
print ('supervised training for decoder...')
for epoch in range(self.options.epochs):
sents = 0
total_loss = 0.0
train = self.reader.next_example(0)
train_size = len(self.reader.data[0])
for data in train:
s1, s2, s3, pos, act = data[0], data[1], data[2], data[3], data[4]
loss, loss_act, loss_word = decoder.compute_loss(s3, act, self.options.dec_dropout)
sents += 1
if loss is not None:
total_loss += loss.scalar_value()
loss.backward()
if self.options.optimizer == 'sgd':
dec_trainer.update(lr)
else:
dec_trainer.update()
e = float(i) / train_size
if i % self.options.print_every == 0:
print('epoch {}: loss per sentence: {}'.format(e, total_loss / sents))
sents = 0
total_loss = 0.0
if i!=0 and i % self.options.save_every == 0:
print('computing loss on validation set...')
total_valid_loss = 0
valid = self.reader.next_example(1)
valid_size = len(self.reader.data[1])
for vdata in valid:
s1, s2, s3, pos, act = vdata[0], vdata[1], vdata[2], vdata[3], vdata[4]
valid_loss, _, _ = decoder.compute_loss(s3, act)
if valid_loss is not None:
total_valid_loss += valid_loss.scalar_value()
total_valid_loss = total_valid_loss * 1.0 / valid_size
if total_valid_loss < lowest_valid_loss:
lowest_valid_loss = total_valid_loss
print ('saving model...')
decoder.Save(self.options.result_dir + 'model_dec')
else:
lr = lr * self.options.decay
i += 1
def unsupervised_with_baseline(self):
decoder = self.create_decoder()
assert(os.path.exists(self.options.result_dir + 'model_dec'))
self.load_decoder(decoder)
encoder = self.create_encoder()
assert(os.path.exists(self.options.result_dir + 'model_enc'))
self.load_encoder(encoder)
baseline = self.create_baseline()
if os.path.exists(self.options.result_dir + 'baseline'):
self.load_baseline(baseline)
enc_trainer = optimizers[self.options.optimizer](encoder.model)
dec_trainer = optimizers[self.options.optimizer](decoder.model)
baseline_trainer = optimizers[self.options.optimizer](baseline.model)
lr = self.options.lr #used only for sgd
i = 0
lowest_valid_loss = 9999
print ('unsupervised training...')
for epoch in range(self.options.epochs):
sents = 0
total_loss = 0.0
train = self.reader.next_example(0)
train_size = len(self.reader.data[0])
for data in train:
s1, s2, s3, pos, act = data[0], data[1], data[2], data[3], data[4]
sents += 1
# random sample
enc_loss_act, _, act = encoder.parse(s1, s2, s3, pos, sample=True)
_, dec_loss_act, dec_loss_word = decoder.compute_loss(s3, act)
# save reward
logpx = -dec_loss_word.scalar_value()
total_loss -= logpx
# reconstruction and regularization loss backprop to theta_d
dec_loss_total = dec_loss_word + dec_loss_act * dy.scalarInput(self.options.dec_reg)
dec_loss_total = dec_loss_total * dy.scalarInput(1.0 / self.options.mcsamples)
dec_loss_total.scalar_value()
dec_loss_total.backward()
# update decoder
if self.options.optimizer == 'sgd':
dec_trainer.update(lr)
else:
dec_trainer.update()
if self.options.enc_update > 0:
# compute baseline and backprop to theta_b
b = baseline(s3)
logpxb = b.scalar_value()
b_loss = dy.squared_distance(b, dy.scalarInput(logpx))
b_loss.value()
b_loss.backward()
# update baseline
if self.options.optimizer == 'sgd':
baseline_trainer.update(lr)
else:
baseline_trainer.update()
# policy and regularization loss backprop to theta_e
enc_loss_act = encoder.train(s1, s2, s3, pos, act)
enc_loss_policy = enc_loss_act * dy.scalarInput((logpx - logpxb) / len(s1))
enc_loss_total = enc_loss_policy * dy.scalarInput(self.options.enc_update) - enc_loss_act * dy.scalarInput(self.options.enc_reg)
enc_loss_total = enc_loss_total * dy.scalarInput(1.0 / self.options.mcsamples)
enc_loss_total.value()
enc_loss_total.backward()
# update encoder
if self.options.optimizer == 'sgd':
enc_trainer.update(lr)
else:
enc_trainer.update()
e = float(i) / train_size
if i % self.options.print_every == 0:
print('epoch {}: loss per sentence: {}'.format(e, total_loss / sents))
sents = 0
total_loss = 0.0
if i!=0 and i % self.options.save_every == 0:
print('computing loss on validation set...')
total_valid_loss = 0
valid = self.reader.next_example(1)
valid_size = len(self.reader.data[1])
for vdata in valid:
s1, s2, s3, pos, act = vdata[0], vdata[1], vdata[2], vdata[3], vdata[4]
_, _, valid_word_loss = decoder.compute_loss(s3, act)
if valid_word_loss is not None:
total_valid_loss += valid_word_loss.scalar_value()
total_valid_loss = total_valid_loss * 1.0 / valid_size
if total_valid_loss < lowest_valid_loss:
lowest_valid_loss = total_valid_loss
print ('saving model...')
encoder.Save(self.options.result_dir + 'model_enc')
decoder.Save(self.options.result_dir + 'model_dec')
baseline.Save(self.options.result_dir + 'baseline')
else:
lr = lr * self.options.decay
i += 1
def unsupervised_without_baseline(self):
decoder = self.create_decoder()
assert(os.path.exists(self.options.result_dir + 'model_dec'))
self.load_decoder(decoder)
encoder = self.create_encoder()
assert(os.path.exists(self.options.result_dir + 'model_enc'))
self.load_encoder(encoder)
enc_trainer = optimizers[self.options.optimizer](encoder.model)
dec_trainer = optimizers[self.options.optimizer](decoder.model)
lr = self.options.lr #used only for sgd
i = 0
lowest_valid_loss = 9999
print ('unsupervised training...')
for epoch in range(self.options.epochs):
sents = 0
total_loss = 0.0
train = self.reader.next_example(0)
train_size = len(self.reader.data[0])
for data in train:
s1, s2, s3, pos, act = data[0], data[1], data[2], data[3], data[4]
sents += 1
# max sample
enc_loss_act, _, act = encoder.parse(s1, s2, s3, pos, sample=False)
_, dec_loss_act, dec_loss_word = decoder.compute_loss(s3, act)
logpxb = -dec_loss_word.scalar_value()
total_loss -= logpxb
# random sample
enc_loss_act, _, act = encoder.parse(s1, s2, s3, pos, sample=True)
_, dec_loss_act, dec_loss_word = decoder.compute_loss(s3, act)
# save reward
logpx = -dec_loss_word.scalar_value()
# reconstruction and regularization loss backprop to theta_d
dec_loss_total = dec_loss_word + dec_loss_act * dy.scalarInput(self.options.dec_reg)
dec_loss_total = dec_loss_total * dy.scalarInput(1.0 / self.options.mcsamples)
dec_loss_total.scalar_value()
dec_loss_total.backward()
# update decoder
if self.options.optimizer == 'sgd':
dec_trainer.update(lr)
else:
dec_trainer.update()
if self.options.enc_update > 0:
# policy and regularization loss backprop to theta_e
enc_loss_act = encoder.train(s1, s2, s3, pos, act)
enc_loss_policy = enc_loss_act * dy.scalarInput((logpx - logpxb) / len(s1))
enc_loss_total = enc_loss_policy * dy.scalarInput(self.options.enc_update) - enc_loss_act * dy.scalarInput(self.options.enc_reg)
enc_loss_total = enc_loss_total * dy.scalarInput(1.0 / self.options.mcsamples)
enc_loss_total.value()
enc_loss_total.backward()
if self.options.optimizer == 'sgd':
enc_trainer.update(lr)
else:
enc_trainer.update()
e = float(i) / train_size
if i % self.options.print_every == 0:
print('epoch {}: loss per sentence: {}'.format(e, total_loss / sents))
sents = 0
total_loss = 0.0
if i!=0 and i % self.options.save_every == 0:
print('computing loss on validation set...')
total_valid_loss = 0
valid = self.reader.next_example(1)
valid_size = len(self.reader.data[1])
for vdata in valid:
s1, s2, s3, pos, act = vdata[0], vdata[1], vdata[2], vdata[3], vdata[4]
_, _, valid_word_loss = decoder.compute_loss(s3, act)
if valid_word_loss is not None:
total_valid_loss += valid_word_loss.scalar_value()
total_valid_loss = total_valid_loss * 1.0 / valid_size
if total_valid_loss < lowest_valid_loss:
lowest_valid_loss = total_valid_loss
print ('saving model...')
encoder.Save(self.options.result_dir + 'model_enc')
decoder.Save(self.options.result_dir + 'model_dec')
else:
lr = lr * self.options.decay
i += 1
def pretrain_baseline(self):
baseline = self.create_baseline()
if os.path.exists(self.options.result_dir + 'baseline'):
self.load_baseline(baseline)
baseline_trainer = optimizers[self.options.optimizer](baseline.model)
lr = self.options.lr #used only for sgd
i = 0
lowest_valid_loss = 9999
print ('train baseline, for simplicity use the same data here')
for epoch in range(self.options.epochs):
sents = 0
total_loss = 0.0
train = self.reader.next_example(0)
train_size = len(self.reader.data[0])
for data in train:
s1, s2, s3, pos, act = data[0], data[1], data[2], data[3], data[4]
sents += 1
loss = -baseline(s3)
if loss is not None:
total_loss += loss.scalar_value()
loss.backward()
if self.options.optimizer == 'sgd':
baseline_trainer.update(lr)
else:
baseline_trainer.update()
e = float(i) / train_size
if i % self.options.print_every == 0:
print('epoch {}: loss per sentence: {}'.format(e, total_loss / sents))
sents = 0
total_loss = 0.0
if i!=0 and i % self.options.save_every == 0:
print('computing loss on validation set...')
total_valid_loss = 0
valid = self.reader.next_example(1)
valid_size = len(self.reader.data[1])
for vdata in valid:
s1, s2, s3, pos, act = vdata[0], vdata[1], vdata[2], vdata[3], vdata[4]
valid_loss = -baseline(s3)
if valid_loss is not None:
total_valid_loss += valid_loss.scalar_value()
total_valid_loss = total_valid_loss * 1.0 / valid_size
if total_valid_loss < lowest_valid_loss:
lowest_valid_loss = total_valid_loss
print ('saving model...')
baseline.Save(self.options.result_dir + 'baseline')
else:
lr = lr * self.options.decay
i += 1
def parsing(self):
decoder = self.create_decoder()
assert(os.path.exists(self.options.result_dir + 'model_dec'))
self.load_decoder(decoder)
encoder = self.create_encoder()
assert(os.path.exists(self.options.result_dir + 'model_enc'))
self.load_encoder(encoder)
print('parsing...')
rf = open(os.path.join(self.options.result_dir, 'result'), 'w')
test = self.reader.next_example(2)
p = Parser(encoder, decoder)
for dataid, data in enumerate(test):
s1, s2, s3, pos, act = data[0], data[1], data[2], data[3], data[4]
output = p(s1, s2, s3, pos, self.options.nsamples)
rf.write(output + '\n')
rf.close()
f1 = compute_eval_score(self.options.result_dir)
print('bracket F1 score is {}'.format(f1))
def language_modeling(self):
decoder = self.create_decoder()
assert(os.path.exists(self.options.result_dir + 'model_dec'))
self.load_decoder(decoder)
encoder = self.create_encoder()
assert(os.path.exists(self.options.result_dir + 'model_enc'))
self.load_encoder(encoder)
print('computing language model score...')
test = self.reader.next_example(2)
lm = LanguageModel(encoder, decoder)
total_ll = 0
total_tokens = 0
for dataid, data in enumerate(test):
s1, s2, s3, pos, act = data[0], data[1], data[2], data[3], data[4]
if len(s1) <= 1:
continue
total_ll += lm(s1, s2, s3, pos, self.options.nsamples)
total_tokens += len(s1)
perp = compute_perplexity(total_ll, total_tokens)
print('perplexity: {}'.format(perp))
def create_decoder(self):
return Decoder(self.reader,
self.options.nlayers,
self.options.word_dim,
self.options.pretrained_dim,
self.options.action_dim,
self.options.dec_lstm_dim,
self.options.embedding_file)
def create_encoder(self):
return Encoder(self.reader,
self.options.nlayers,
self.options.word_dim,
self.options.pretrained_dim,
self.options.pos_dim,
self.options.action_dim,
self.options.enc_lstm_dim,
self.options.embedding_file)
def create_baseline(self):
baseline = None
if self.options.baseline == 'rnnlm':
baseline = LanguageModelBaseline(self.reader,
self.options.word_dim,
self.options.pretrained_dim,
self.options.dec_lstm_dim,
self.options.embedding_file)
elif self.options.baseline == 'rnnauto':
baseline = RNNAutoencBaseline(self.reader,
self.options.word_dim,
self.options.pretrained_dim,
self.options.dec_lstm_dim,
self.options.embedding_file)
elif self.options.baseline == 'mlp':
baseline = MLPAutoencBaseline(self.reader,
self.options.word_dim,
self.options.pretrained_dim,
self.options.embedding_file)
else:
raise NotImplementedError("Baseline Not Implemented")
return baseline
def load_decoder(self, decoder):
decoder.Load(self.options.result_dir + 'model_dec')
def load_encoder(self, encoder):
encoder.Load(self.options.result_dir + 'model_enc')
def load_baseline(self, baseline):
baseline.Load(self.options.result_dir + 'baseline')
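In unsupervised_with_baseline the policy loss is scaled by a baseline-subtracted, length-normalised reward, (logpx - logpxb) / len(s1). A minimal sketch of just that scaling:

def policy_scale(logpx, logpxb, sentence_len):
    # Reward is the decoder log-likelihood of the sampled parse; subtracting the
    # baseline prediction reduces variance, and dividing by sentence length keeps
    # the scale comparable across sentences.
    return (logpx - logpxb) / sentence_len

# A sample whose reconstruction beats the baseline gets a positive scale, pushing
# the encoder towards that parse; a worse-than-baseline sample gets a negative one.
assert policy_scale(-40.0, -50.0, 10) == 1.0
assert policy_scale(-60.0, -50.0, 10) == -1.0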
| 42.475149
| 148
| 0.514299
| 2,355
| 21,365
| 4.472611
| 0.082803
| 0.11801
| 0.043577
| 0.051267
| 0.839362
| 0.829868
| 0.809266
| 0.791133
| 0.784297
| 0.781164
| 0
| 0.021434
| 0.390733
| 21,365
| 502
| 149
| 42.559761
| 0.787739
| 0.022841
| 0
| 0.765432
| 0
| 0
| 0.04785
| 0
| 0
| 0
| 0
| 0
| 0.019753
| 1
| 0.034568
| false
| 0
| 0.022222
| 0.004938
| 0.066667
| 0.074074
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d7b09b3899eab74c7da7d05d1a738239da6a6f8c
| 93
|
py
|
Python
|
backoffice/transactions/utils.py
|
AlejandroUPC/pythonmicroservices
|
9d42bd6dfd9847ad4a8e6029e808de927292c251
|
[
"MIT"
] | null | null | null |
backoffice/transactions/utils.py
|
AlejandroUPC/pythonmicroservices
|
9d42bd6dfd9847ad4a8e6029e808de927292c251
|
[
"MIT"
] | null | null | null |
backoffice/transactions/utils.py
|
AlejandroUPC/pythonmicroservices
|
9d42bd6dfd9847ad4a8e6029e808de927292c251
|
[
"MIT"
] | null | null | null |
import random
def create_random_id():
return str(random.randint(100000,999999999999999))
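Usage sketch: random.randint(100000, 999999999999999) yields between 6 and 15 decimal digits, so the id comes back as a short numeric string.

tx_id = create_random_id()
assert tx_id.isdigit()
assert 6 <= len(tx_id) <= 15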
| 23.25
| 54
| 0.795699
| 12
| 93
| 6
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.253012
| 0.107527
| 93
| 4
| 54
| 23.25
| 0.614458
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
d7da6c76e88c3141152b6b83c5b2260da1ff5ebc
| 168
|
py
|
Python
|
classification/admin/__init__.py
|
SACGF/variantgrid
|
515195e2f03a0da3a3e5f2919d8e0431babfd9c9
|
[
"RSA-MD"
] | 5
|
2021-01-14T03:34:42.000Z
|
2022-03-07T15:34:18.000Z
|
classification/admin/__init__.py
|
SACGF/variantgrid
|
515195e2f03a0da3a3e5f2919d8e0431babfd9c9
|
[
"RSA-MD"
] | 551
|
2020-10-19T00:02:38.000Z
|
2022-03-30T02:18:22.000Z
|
classification/admin/__init__.py
|
SACGF/variantgrid
|
515195e2f03a0da3a3e5f2919d8e0431babfd9c9
|
[
"RSA-MD"
] | null | null | null |
from classification.admin.classification_admin import *
from classification.admin.clinvar_export_admin import *
from classification.admin.condition_text_admin import *
| 42
| 55
| 0.875
| 20
| 168
| 7.1
| 0.4
| 0.535211
| 0.485915
| 0.408451
| 0.478873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 168
| 3
| 56
| 56
| 0.910256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
041d25b3caf16615ffe44105dc3916c9eb43a246
| 10,048
|
py
|
Python
|
tf_DDPG.py
|
laket/DDPG_Eager
|
e0b6f960acc193838189b714e67cd1f7da60f440
|
[
"Apache-2.0"
] | 2
|
2019-05-13T18:43:17.000Z
|
2019-08-22T08:08:03.000Z
|
tf_DDPG.py
|
DeanLeeFumu/DDPG_Eager
|
e0b6f960acc193838189b714e67cd1f7da60f440
|
[
"Apache-2.0"
] | null | null | null |
tf_DDPG.py
|
DeanLeeFumu/DDPG_Eager
|
e0b6f960acc193838189b714e67cd1f7da60f440
|
[
"Apache-2.0"
] | 1
|
2019-08-23T02:45:16.000Z
|
2019-08-23T02:45:16.000Z
|
# Copyright 2018 Oiki Tomoaki. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
DDPG implementation in TensorFlow Eager Execution
"""
import numpy as np
import tensorflow as tf
from utils import PytorchInitializer
layers = tf.keras.layers
regularizers = tf.keras.regularizers
losses = tf.keras.losses
class Actor(tf.keras.Model):
def __init__(self, state_dim, action_dim, max_action, name="Actor"):
super().__init__(name=name)
self.l1 = layers.Dense(400, kernel_initializer=PytorchInitializer(),
name="L1")
self.l2 = layers.Dense(300, kernel_initializer=PytorchInitializer(),
name="L2")
self.l3 = layers.Dense(action_dim, kernel_initializer=PytorchInitializer(),
name="L3")
self.max_action = max_action
# fix the layer shapes early (via a dummy forward pass) for the downstream processing
dummy_state = tf.constant(np.zeros(shape=[1, state_dim], dtype=np.float32))
self(dummy_state)
def call(self, inputs):
with tf.device("/gpu:0"):
features = tf.nn.relu(self.l1(inputs))
features = tf.nn.relu(self.l2(features))
features = self.l3(features)
action = self.max_action * tf.nn.tanh(features)
return action
class Critic(tf.keras.Model):
def __init__(self, state_dim, action_dim, wd=1e-2, name="Critic"):
super().__init__(name=name)
self.l1 = layers.Dense(400, kernel_initializer=PytorchInitializer(),
kernel_regularizer=regularizers.l2(wd), bias_regularizer=regularizers.l2(wd),
name="L1")
self.l2 = layers.Dense(300, kernel_initializer=PytorchInitializer(),
kernel_regularizer=regularizers.l2(wd), bias_regularizer=regularizers.l2(wd),
name="L2")
        self.l3 = layers.Dense(1, kernel_initializer=PytorchInitializer(),
                               kernel_regularizer=regularizers.l2(wd), bias_regularizer=regularizers.l2(wd),
                               name="L3")

        dummy_state = tf.constant(np.zeros(shape=[1, state_dim], dtype=np.float32))
        dummy_action = tf.constant(np.zeros(shape=[1, action_dim], dtype=np.float32))
        self([dummy_state, dummy_action])

    def call(self, inputs):
        with tf.device("/gpu:0"):
            x, u = inputs
            x = tf.nn.relu(self.l1(x))
            inner_feat = tf.concat([x, u], axis=1)
            x = tf.nn.relu(self.l2(inner_feat))
            x = self.l3(x)
            return x


class DDPG(tf.contrib.checkpoint.Checkpointable):
    def __init__(self, state_dim, action_dim, max_action):
        self.actor = Actor(state_dim, action_dim, max_action)
        self.actor_target = Actor(state_dim, action_dim, max_action)
        # initialize target network
        for param, target_param in zip(self.actor.weights, self.actor_target.weights):
            target_param.assign(param)
        self.actor_optimizer = tf.train.AdamOptimizer(learning_rate=1e-4)

        self.critic = Critic(state_dim, action_dim)
        self.critic_target = Critic(state_dim, action_dim)
        # initialize target network
        for param, target_param in zip(self.critic.weights, self.critic_target.weights):
            target_param.assign(param)
        self.critic_optimizer = tf.train.AdamOptimizer(learning_rate=1e-3)

    def select_action(self, state):
        """
        :param np.ndarray state:
        :return:
        """
        assert isinstance(state, np.ndarray)
        assert len(state.shape) == 1
        state = np.expand_dims(state, axis=0).astype(np.float32)
        action = self.actor(state).numpy()
        return action[0]

    def train(self, replay_buffer, iterations, batch_size=64, discount=0.99, tau=0.001):
        for it in range(iterations):
            state, next_state, action, reward, done = replay_buffer.sample(batch_size)
            state = np.array(state, dtype=np.float32)
            next_state = np.array(next_state, dtype=np.float32)
            action = np.array(action, dtype=np.float32)
            reward = np.array(reward, dtype=np.float32)
            done = np.array(done, dtype=np.float32)
            not_done = 1 - done

            with tf.device("/gpu:0"):
                with tf.GradientTape() as tape:
                    target_Q = self.critic_target([next_state, self.actor_target(next_state)])
                    target_Q = reward + (not_done * discount * target_Q)
                    # detach => stop_gradient
                    target_Q = tf.stop_gradient(target_Q)
                    current_Q = self.critic([state, action])
                    # Compute critic loss + L2 loss
                    critic_loss = tf.reduce_mean(losses.MSE(current_Q, target_Q)) + 0.5*tf.add_n(self.critic.losses)
                critic_grad = tape.gradient(critic_loss, self.critic.trainable_variables)
                self.critic_optimizer.apply_gradients(zip(critic_grad, self.critic.trainable_variables))

                with tf.GradientTape() as tape:
                    next_action = self.actor(state)
                    actor_loss = -tf.reduce_mean(self.critic([state, next_action]))
                actor_grad = tape.gradient(actor_loss, self.actor.trainable_variables)
                self.actor_optimizer.apply_gradients(zip(actor_grad, self.actor.trainable_variables))

                # Update target networks
                for param, target_param in zip(self.critic.weights, self.critic_target.weights):
                    target_param.assign(tau * param + (1 - tau) * target_param)
                for param, target_param in zip(self.actor.weights, self.actor_target.weights):
                    target_param.assign(tau * param + (1 - tau) * target_param)


class DDPG_fast(tf.contrib.checkpoint.Checkpointable):
    def __init__(self, state_dim, action_dim, max_action):
        self.actor = Actor(state_dim, action_dim, max_action)
        self.actor_target = Actor(state_dim, action_dim, max_action)
        self.actor_optimizer = tf.train.AdamOptimizer(learning_rate=1e-4)
        # initialize target network
        for param, target_param in zip(self.actor.weights, self.actor_target.weights):
            target_param.assign(param)

        self.critic = Critic(state_dim, action_dim)
        self.critic_target = Critic(state_dim, action_dim)
        self.critic_optimizer = tf.train.AdamOptimizer(learning_rate=1e-3)
        # initialize target network
        for param, target_param in zip(self.critic.weights, self.critic_target.weights):
            target_param.assign(param)

    def select_action(self, state):
        """
        :param np.ndarray state:
        :return:
        """
        assert isinstance(state, np.ndarray)
        assert len(state.shape) == 1
        state = np.expand_dims(state, axis=0).astype(np.float32)
        action = self._select_action_body(tf.constant(state))
        return action.numpy()[0]

    @tf.contrib.eager.defun
    def _select_action_body(self, state):
        """
        :param np.ndarray state:
        :return:
        """
        action = self.actor(state)
        return action

    def train(self, replay_buffer, iterations, batch_size=64, discount=0.99, tau=0.001):
        for it in range(iterations):
            state, next_state, action, reward, done = replay_buffer.sample(batch_size)
            state = np.array(state, dtype=np.float32)
            next_state = np.array(next_state, dtype=np.float32)
            action = np.array(action, dtype=np.float32)
            reward = np.array(reward, dtype=np.float32)
            done = np.array(done, dtype=np.float32)
            not_done = 1 - done
            self._train_body(state, next_state, action, reward, not_done, discount, tau)

    @tf.contrib.eager.defun
    def _train_body(self, state, next_state, action, reward, not_done, discount, tau):
        with tf.device("/gpu:0"):
            with tf.GradientTape() as tape:
                target_Q = self.critic_target([next_state, self.actor_target(next_state)])
                target_Q = reward + (not_done * discount * target_Q)
                # detach => stop_gradient
                target_Q = tf.stop_gradient(target_Q)
                current_Q = self.critic([state, action])
                # Compute critic loss + L2 loss
                critic_loss = tf.reduce_mean(losses.MSE(current_Q, target_Q)) + 0.5*tf.add_n(self.critic.losses)
            critic_grad = tape.gradient(critic_loss, self.critic.trainable_variables)
            self.critic_optimizer.apply_gradients(zip(critic_grad, self.critic.trainable_variables))

            with tf.GradientTape() as tape:
                next_action = self.actor(state)
                actor_loss = -tf.reduce_mean(self.critic([state, next_action]))
            actor_grad = tape.gradient(actor_loss, self.actor.trainable_variables)
            self.actor_optimizer.apply_gradients(zip(actor_grad, self.actor.trainable_variables))

            # Update target networks
            for param, target_param in zip(self.critic.weights, self.critic_target.weights):
                target_param.assign(tau * param + (1 - tau) * target_param)
            for param, target_param in zip(self.actor.weights, self.actor_target.weights):
                target_param.assign(tau * param + (1 - tau) * target_param)
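
# ---------------------------------------------------------------------------
# Hypothetical usage sketch (not part of the original file): a minimal FIFO
# replay buffer with the sample() signature that train() above unpacks, i.e.
# five parallel sequences (state, next_state, action, reward, done).  The
# class name and the Gym-style loop sketched in the comments below are
# assumptions added purely for illustration.
class SimpleReplayBuffer:
    def __init__(self, max_size=100000):
        self._storage = []
        self._max_size = max_size

    def add(self, state, next_state, action, reward, done):
        # Drop the oldest transition once the buffer is full.
        if len(self._storage) >= self._max_size:
            self._storage.pop(0)
        self._storage.append((state, next_state, action, reward, done))

    def sample(self, batch_size):
        # Sample with replacement and return five parallel lists.
        idx = np.random.randint(0, len(self._storage), size=batch_size)
        return tuple(list(col) for col in zip(*(self._storage[i] for i in idx)))

# Sketch of how the pieces could fit together (env is any Gym-style environment):
#     agent = DDPG_fast(state_dim, action_dim, max_action)
#     buffer = SimpleReplayBuffer()
#     state = env.reset()
#     for step in range(total_steps):
#         action = agent.select_action(state)
#         next_state, reward, done, _ = env.step(action)
#         buffer.add(state, next_state, action, reward, float(done))
#         state = env.reset() if done else next_state
#         if len(buffer._storage) >= 64:
#             agent.train(buffer, iterations=1, batch_size=64)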
| 40.192
| 116
| 0.624104
| 1,241
| 10,048
| 4.868654
| 0.158743
| 0.046342
| 0.030122
| 0.033764
| 0.8143
| 0.79477
| 0.784012
| 0.770937
| 0.770937
| 0.743959
| 0
| 0.017444
| 0.264033
| 10,048
| 249
| 117
| 40.353414
| 0.799594
| 0.107982
| 0
| 0.751724
| 0
| 0
| 0.00532
| 0
| 0
| 0
| 0
| 0
| 0.027586
| 1
| 0.082759
| false
| 0
| 0.02069
| 0
| 0.165517
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0431b86b5fec8a3a6766b7ed7c6bc620c17ad76e
| 86
|
py
|
Python
|
functionalities.py
|
Dilkovak/Naggy-Bot
|
7727a77d1916336d6f3f52efc40437a9d1ae960b
|
[
"MIT"
] | null | null | null |
functionalities.py
|
Dilkovak/Naggy-Bot
|
7727a77d1916336d6f3f52efc40437a9d1ae960b
|
[
"MIT"
] | null | null | null |
functionalities.py
|
Dilkovak/Naggy-Bot
|
7727a77d1916336d6f3f52efc40437a9d1ae960b
|
[
"MIT"
] | null | null | null |
import random
def coinflip():
    # print(random.random())
    return random.random()
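
# Hypothetical usage note (not in the original file): coinflip() returns a
# uniform float in [0.0, 1.0), so a fair heads/tails decision could simply
# threshold it at 0.5.
def is_heads():
    return coinflip() < 0.5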
| 17.2
| 28
| 0.674419
| 10
| 86
| 5.8
| 0.6
| 0.413793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 86
| 5
| 29
| 17.2
| 0.828571
| 0.255814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
f0994ccd4067e97928229e99678562d13da50032
| 5,254
|
py
|
Python
|
src/zebrafish_ec_migration/pipelines/cell_trajectory_analysis_pipeline/compute_trajectory_features.py
|
wgiese/zebrafish_ec_migration
|
6c0e83716f4b9dcf8ca67528ae1efba031c75117
|
[
"Apache-2.0"
] | null | null | null |
src/zebrafish_ec_migration/pipelines/cell_trajectory_analysis_pipeline/compute_trajectory_features.py
|
wgiese/zebrafish_ec_migration
|
6c0e83716f4b9dcf8ca67528ae1efba031c75117
|
[
"Apache-2.0"
] | null | null | null |
src/zebrafish_ec_migration/pipelines/cell_trajectory_analysis_pipeline/compute_trajectory_features.py
|
wgiese/zebrafish_ec_migration
|
6c0e83716f4b9dcf8ca67528ae1efba031c75117
|
[
"Apache-2.0"
] | null | null | null |
import pandas as pd
from typing import Dict, List
import numpy as np
import pylab as plt
def compute_trajectory_features_set1(aligned_trajector_key_file: pd.DataFrame, parameters: Dict, start_time, end_time):
    features_df = pd.DataFrame()
    data_statistics_df = pd.DataFrame()
    counter = 0
    frame_interval = 12.0
    time_interval = frame_interval*10.0/60.0

    for fish_number in aligned_trajector_key_file["fish_number"].unique():
        if (np.isnan(fish_number)):
            continue
        df_single_fish_all_groups = aligned_trajector_key_file[aligned_trajector_key_file['fish_number'] == fish_number]
        for analysis_group in df_single_fish_all_groups["analysis_group"].unique():
            df_single_fish = df_single_fish_all_groups[df_single_fish_all_groups["analysis_group"] == analysis_group]
            movement_data = pd.DataFrame(data=[],
                                         columns=["x", "y", "z", "frame", "link_id", "object_id", "vessel_type"])
            for index, row in df_single_fish.iterrows():
                object_data = pd.read_csv(row["object_data"])
                link_data = pd.read_csv(row["link_data"])
                movement_data = pd.merge(object_data, link_data, on='object_id')
                for link_id in movement_data["link_id"].unique():
                    movement_data_ = movement_data[movement_data["link_id"]==link_id]
                    dist = movement_data_.diff(frame_interval).fillna(np.nan)
                    dist_step = movement_data_.diff(1).fillna(np.nan)
                    movement_data_['step_size'] = np.sqrt(dist.x**2 + dist.y**2)
                    movement_data_['step_size_x'] = dist_step.x
                    movement_data_['step_size_y'] = dist_step.y
                    movement_data_['velocity_micron_per_h'] = np.sqrt(dist.x**2 + dist.y**2)/time_interval
                    movement_data_['vd_velocity_micron_per_h'] = dist.y/time_interval
                    #movement_data_['step_size_y'] = dist.y
                    movement_data_['fish_number'] = fish_number
                    movement_data_['vessel_type'] = row['vessel_type']
                    movement_data_['analysis_group'] = analysis_group
                    movement_data_['time_in_hpf'] = 24.0 + 10.0 * (movement_data_['frame'] - 1)/60.0
                    movement_data_['time_in_min'] = 10 * (movement_data_['frame'] - 1)

                    if len(features_df.columns) > 1:
                        features_df = movement_data_.append(features_df)
                    else:
                        features_df = movement_data_.copy()

    return features_df#, data_statistics_df


def compute_trajectory_features_set2(aligned_trajector_key_file: pd.DataFrame, parameters: Dict, start_time, end_time):
    features_df = pd.DataFrame()
    data_statistics_df = pd.DataFrame()
    counter = 0
    frame_interval = 12.0
    time_interval = frame_interval*10.0/60.0

    for fish_number in aligned_trajector_key_file["fish_number"].unique():
        if (np.isnan(fish_number)):
            continue
        df_single_fish_all_groups = aligned_trajector_key_file[aligned_trajector_key_file['fish_number'] == fish_number]
        for analysis_group in df_single_fish_all_groups["analysis_group"].unique():
            df_single_fish = df_single_fish_all_groups[df_single_fish_all_groups["analysis_group"] == analysis_group]
            movement_data = pd.DataFrame(data=[],
                                         columns=["x", "y", "z", "frame", "link_id", "object_id", "vessel_type"])
            for index, row in df_single_fish.iterrows():
                object_data = pd.read_csv(row["object_data"])
                link_data = pd.read_csv(row["link_data"])
                movement_data = pd.merge(object_data, link_data, on='object_id')
                for link_id in movement_data["link_id"].unique():
                    movement_data_ = movement_data[movement_data["link_id"]==link_id]
                    dist = movement_data_.diff(frame_interval).fillna(np.nan)
                    dist_step = movement_data_.diff(1).fillna(np.nan)
                    movement_data_['step_size'] = np.sqrt(dist.x**2 + dist.y**2)
                    movement_data_['step_size_x'] = dist_step.x
                    movement_data_['step_size_y'] = dist_step.y
                    movement_data_['velocity_micron_per_h'] = np.sqrt(dist.x**2 + dist.y**2)/time_interval
                    movement_data_['vd_velocity_micron_per_h'] = dist.y/time_interval
                    #movement_data_['step_size_y'] = dist.y
                    movement_data_['fish_number'] = fish_number
                    movement_data_['vessel_type'] = row['vessel_type']
                    movement_data_['analysis_group'] = analysis_group
                    movement_data_['time_in_hpf'] = 24.0 + 10.0 * (movement_data_['frame'] - 1)/60.0
                    movement_data_['time_in_min'] = 10 * (movement_data_['frame'] - 1)

                    if len(features_df.columns) > 1:
                        features_df = movement_data_.append(features_df)
                    else:
                        features_df = movement_data_.copy()

    return features_df#, data_statistics_df
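
# ---------------------------------------------------------------------------
# Toy illustration (not part of the pipeline, added for clarity): how the
# velocity feature computed above is derived.  Displacements are taken over
# frame_interval frames, each frame being 10 minutes apart, and converted to
# micron per hour by dividing by time_interval (in hours).  The DataFrame
# below is made-up data.
if __name__ == "__main__":
    frame_interval = 12
    time_interval = frame_interval * 10.0 / 60.0  # 12 frames * 10 min = 2 h

    track = pd.DataFrame({
        "frame": np.arange(1, 25),
        "x": np.linspace(0.0, 46.0, 24),
        "y": np.linspace(0.0, 23.0, 24),
    })
    dist = track.diff(frame_interval)
    track["velocity_micron_per_h"] = np.sqrt(dist.x**2 + dist.y**2) / time_interval
    print(track[["frame", "velocity_micron_per_h"]].tail())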
| 47.763636
| 120
| 0.611153
| 661
| 5,254
| 4.416036
| 0.134644
| 0.189106
| 0.049332
| 0.063035
| 0.953751
| 0.953751
| 0.953751
| 0.953751
| 0.953751
| 0.953751
| 0
| 0.01589
| 0.281309
| 5,254
| 110
| 121
| 47.763636
| 0.75715
| 0.022078
| 0
| 0.923077
| 0
| 0
| 0.110224
| 0.017527
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025641
| false
| 0
| 0.051282
| 0
| 0.102564
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f0dd70671137f9276770eb4324b9867b67865ca3
| 51,458
|
py
|
Python
|
tests/test_number_cleaner.py
|
theseus-automl/gorgona
|
a7366d54430caa5a038488432fb93702e1cb83b8
|
[
"Apache-2.0"
] | 1
|
2021-12-12T10:47:00.000Z
|
2021-12-12T10:47:00.000Z
|
tests/test_number_cleaner.py
|
theseus-automl/gorgona
|
a7366d54430caa5a038488432fb93702e1cb83b8
|
[
"Apache-2.0"
] | 5
|
2021-12-12T10:45:04.000Z
|
2022-01-17T07:51:14.000Z
|
tests/test_number_cleaner.py
|
theseus-automl/gorgona
|
a7366d54430caa5a038488432fb93702e1cb83b8
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from gorgona.stages.cleaners import NumberCleaner
@pytest.fixture()
def setup_number_cleaner():
nc = NumberCleaner(
'',
'',
)
return nc
def test_positive_integer_single_digit_single_digit(setup_number_cleaner):
assert setup_number_cleaner("7") == ""
def test_positive_integer_single_digit_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("3") == ""
def test_positive_integer_single_digit_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("9'5") == ""
def test_positive_integer_single_digit_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("0'257175") == ""
def test_positive_integer_single_digit_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("9`9") == ""
def test_positive_integer_single_digit_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("0`985776") == ""
def test_positive_integer_single_digit_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("5 6") == ""
def test_positive_integer_single_digit_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("3 839118") == ""
def test_positive_integer_single_digit_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("4k6") == ""
def test_positive_integer_single_digit_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("3k504421") == ""
def test_positive_integer_single_digit_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("4к4") == ""
def test_positive_integer_single_digit_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("5к117864") == ""
def test_positive_integer_multiple_digits_single_digit(setup_number_cleaner):
assert setup_number_cleaner("774464") == ""
def test_positive_integer_multiple_digits_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("35655") == ""
def test_positive_integer_multiple_digits_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("249910'9") == ""
def test_positive_integer_multiple_digits_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("480142'838693") == ""
def test_positive_integer_multiple_digits_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("154095`1") == ""
def test_positive_integer_multiple_digits_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("85818`184705") == ""
def test_positive_integer_multiple_digits_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("306485 3") == ""
def test_positive_integer_multiple_digits_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("22721 546337") == ""
def test_positive_integer_multiple_digits_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("464830k0") == ""
def test_positive_integer_multiple_digits_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("955186k918058") == ""
def test_positive_integer_multiple_digits_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("570511к2") == ""
def test_positive_integer_multiple_digits_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("564964к869484") == ""
def test_negative_integer_single_digit_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-4") == ""
def test_negative_integer_single_digit_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-5") == ""
def test_negative_integer_single_digit_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-0'0") == ""
def test_negative_integer_single_digit_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-8'803962") == ""
def test_negative_integer_single_digit_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-0`5") == ""
def test_negative_integer_single_digit_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-7`895475") == ""
def test_negative_integer_single_digit_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-9 8") == ""
def test_negative_integer_single_digit_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-8 551966") == ""
def test_negative_integer_single_digit_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-2k5") == ""
def test_negative_integer_single_digit_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-3k484318") == ""
def test_negative_integer_single_digit_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-2к5") == ""
def test_negative_integer_single_digit_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-3к283697") == ""
def test_negative_integer_multiple_digits_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-138166") == ""
def test_negative_integer_multiple_digits_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-94352") == ""
def test_negative_integer_multiple_digits_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-473778'5") == ""
def test_negative_integer_multiple_digits_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-787864'453129") == ""
def test_negative_integer_multiple_digits_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-911004`4") == ""
def test_negative_integer_multiple_digits_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-392620`715189") == ""
def test_negative_integer_multiple_digits_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-908466 6") == ""
def test_negative_integer_multiple_digits_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-279418 645330") == ""
def test_negative_integer_multiple_digits_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-591608k5") == ""
def test_negative_integer_multiple_digits_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-997435k133244") == ""
def test_negative_integer_multiple_digits_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-172174к1") == ""
def test_negative_integer_multiple_digits_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-733910к513370") == ""
def test_left_text_positive_integer_single_digit_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 4") == "hello "
def test_left_text_positive_integer_single_digit_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 7") == "hello "
def test_left_text_positive_integer_single_digit_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 3'5") == "hello "
def test_left_text_positive_integer_single_digit_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 1'414237") == "hello "
def test_left_text_positive_integer_single_digit_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 2`5") == "hello "
def test_left_text_positive_integer_single_digit_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 6`792669") == "hello "
def test_left_text_positive_integer_single_digit_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 8 6") == "hello "
def test_left_text_positive_integer_single_digit_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 4 732535") == "hello "
def test_left_text_positive_integer_single_digit_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 7k2") == "hello "
def test_left_text_positive_integer_single_digit_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 9k798422") == "hello "
def test_left_text_positive_integer_single_digit_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 0к2") == "hello "
def test_left_text_positive_integer_single_digit_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 6к449708") == "hello "
def test_left_text_positive_integer_multiple_digits_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 84908") == "hello "
def test_left_text_positive_integer_multiple_digits_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 434178") == "hello "
def test_left_text_positive_integer_multiple_digits_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 580178'5") == "hello "
def test_left_text_positive_integer_multiple_digits_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 403087'446030") == "hello "
def test_left_text_positive_integer_multiple_digits_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 99510`9") == "hello "
def test_left_text_positive_integer_multiple_digits_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 880343`699877") == "hello "
def test_left_text_positive_integer_multiple_digits_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 525007 2") == "hello "
def test_left_text_positive_integer_multiple_digits_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 872947 296824") == "hello "
def test_left_text_positive_integer_multiple_digits_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 450966k4") == "hello "
def test_left_text_positive_integer_multiple_digits_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 993633k963503") == "hello "
def test_left_text_positive_integer_multiple_digits_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 902081к2") == "hello "
def test_left_text_positive_integer_multiple_digits_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 398410к5738") == "hello "
def test_left_text_negative_integer_single_digit_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -6") == "hello "
def test_left_text_negative_integer_single_digit_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -6") == "hello "
def test_left_text_negative_integer_single_digit_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -6'2") == "hello "
def test_left_text_negative_integer_single_digit_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -3'759377") == "hello "
def test_left_text_negative_integer_single_digit_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -7`1") == "hello "
def test_left_text_negative_integer_single_digit_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -1`502604") == "hello "
def test_left_text_negative_integer_single_digit_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -2 3") == "hello "
def test_left_text_negative_integer_single_digit_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -1 393569") == "hello "
def test_left_text_negative_integer_single_digit_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -6k3") == "hello "
def test_left_text_negative_integer_single_digit_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -1k432422") == "hello "
def test_left_text_negative_integer_single_digit_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -5к5") == "hello "
def test_left_text_negative_integer_single_digit_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -1к68404") == "hello "
def test_left_text_negative_integer_multiple_digits_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -518862") == "hello "
def test_left_text_negative_integer_multiple_digits_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -311825") == "hello "
def test_left_text_negative_integer_multiple_digits_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -13646'6") == "hello "
def test_left_text_negative_integer_multiple_digits_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -155588'658068") == "hello "
def test_left_text_negative_integer_multiple_digits_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -902010`6") == "hello "
def test_left_text_negative_integer_multiple_digits_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -339050`817304") == "hello "
def test_left_text_negative_integer_multiple_digits_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -923620 6") == "hello "
def test_left_text_negative_integer_multiple_digits_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -277075 908827") == "hello "
def test_left_text_negative_integer_multiple_digits_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -770630k5") == "hello "
def test_left_text_negative_integer_multiple_digits_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -543724k219469") == "hello "
def test_left_text_negative_integer_multiple_digits_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -118460к2") == "hello "
def test_left_text_negative_integer_multiple_digits_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -159072к256757") == "hello "
def test_right_text_positive_integer_single_digit_single_digit(setup_number_cleaner):
assert setup_number_cleaner("2 hello") == " hello"
def test_right_text_positive_integer_single_digit_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("1 hello") == " hello"
def test_right_text_positive_integer_single_digit_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("6'4 hello") == " hello"
def test_right_text_positive_integer_single_digit_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("3'58431 hello") == " hello"
def test_right_text_positive_integer_single_digit_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("0`5 hello") == " hello"
def test_right_text_positive_integer_single_digit_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("5`155738 hello") == " hello"
def test_right_text_positive_integer_single_digit_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("5 3 hello") == " hello"
def test_right_text_positive_integer_single_digit_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("2 912797 hello") == " hello"
def test_right_text_positive_integer_single_digit_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("5k3 hello") == " hello"
def test_right_text_positive_integer_single_digit_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("9k911768 hello") == " hello"
def test_right_text_positive_integer_single_digit_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("3к3 hello") == " hello"
def test_right_text_positive_integer_single_digit_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("3к750248 hello") == " hello"
def test_right_text_positive_integer_multiple_digits_single_digit(setup_number_cleaner):
assert setup_number_cleaner("42678 hello") == " hello"
def test_right_text_positive_integer_multiple_digits_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("215188 hello") == " hello"
def test_right_text_positive_integer_multiple_digits_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("455258'3 hello") == " hello"
def test_right_text_positive_integer_multiple_digits_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("806580'611928 hello") == " hello"
def test_right_text_positive_integer_multiple_digits_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("479352`5 hello") == " hello"
def test_right_text_positive_integer_multiple_digits_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("519252`685635 hello") == " hello"
def test_right_text_positive_integer_multiple_digits_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("928184 7 hello") == " hello"
def test_right_text_positive_integer_multiple_digits_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("489262 493403 hello") == " hello"
def test_right_text_positive_integer_multiple_digits_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("34773k1 hello") == " hello"
def test_right_text_positive_integer_multiple_digits_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("675960k827611 hello") == " hello"
def test_right_text_positive_integer_multiple_digits_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("876524к5 hello") == " hello"
def test_right_text_positive_integer_multiple_digits_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("55243к431074 hello") == " hello"
def test_right_text_negative_integer_single_digit_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-7 hello") == " hello"
def test_right_text_negative_integer_single_digit_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-1 hello") == " hello"
def test_right_text_negative_integer_single_digit_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-5'2 hello") == " hello"
def test_right_text_negative_integer_single_digit_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-9'814320 hello") == " hello"
def test_right_text_negative_integer_single_digit_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-0`8 hello") == " hello"
def test_right_text_negative_integer_single_digit_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-3`877194 hello") == " hello"
def test_right_text_negative_integer_single_digit_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-8 6 hello") == " hello"
def test_right_text_negative_integer_single_digit_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-3 873345 hello") == " hello"
def test_right_text_negative_integer_single_digit_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-8k9 hello") == " hello"
def test_right_text_negative_integer_single_digit_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-5k346049 hello") == " hello"
def test_right_text_negative_integer_single_digit_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-4к6 hello") == " hello"
def test_right_text_negative_integer_single_digit_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-9к703473 hello") == " hello"
def test_right_text_negative_integer_multiple_digits_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-190239 hello") == " hello"
def test_right_text_negative_integer_multiple_digits_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-839965 hello") == " hello"
def test_right_text_negative_integer_multiple_digits_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-517738'9 hello") == " hello"
def test_right_text_negative_integer_multiple_digits_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-764801'614671 hello") == " hello"
def test_right_text_negative_integer_multiple_digits_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-634963`9 hello") == " hello"
def test_right_text_negative_integer_multiple_digits_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-372948`939025 hello") == " hello"
def test_right_text_negative_integer_multiple_digits_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-760889 7 hello") == " hello"
def test_right_text_negative_integer_multiple_digits_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-7831 504330 hello") == " hello"
def test_right_text_negative_integer_multiple_digits_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-837557k3 hello") == " hello"
def test_right_text_negative_integer_multiple_digits_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-195729k572621 hello") == " hello"
def test_right_text_negative_integer_multiple_digits_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("-355848к0 hello") == " hello"
def test_right_text_negative_integer_multiple_digits_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("-665426к392704 hello") == " hello"
def test_both_text_positive_integer_single_digit_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 4 world") == "hello world"
def test_both_text_positive_integer_single_digit_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 8 world") == "hello world"
def test_both_text_positive_integer_single_digit_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 6'2 world") == "hello world"
def test_both_text_positive_integer_single_digit_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 3'622671 world") == "hello world"
def test_both_text_positive_integer_single_digit_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 6`0 world") == "hello world"
def test_both_text_positive_integer_single_digit_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 8`757195 world") == "hello world"
def test_both_text_positive_integer_single_digit_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 0 1 world") == "hello world"
def test_both_text_positive_integer_single_digit_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 7 862462 world") == "hello world"
def test_both_text_positive_integer_single_digit_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 8k5 world") == "hello world"
def test_both_text_positive_integer_single_digit_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 3k314471 world") == "hello world"
def test_both_text_positive_integer_single_digit_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 2к5 world") == "hello world"
def test_both_text_positive_integer_single_digit_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 9к486783 world") == "hello world"
def test_both_text_positive_integer_multiple_digits_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 805686 world") == "hello world"
def test_both_text_positive_integer_multiple_digits_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 369355 world") == "hello world"
def test_both_text_positive_integer_multiple_digits_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 163343'0 world") == "hello world"
def test_both_text_positive_integer_multiple_digits_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 461408'736785 world") == "hello world"
def test_both_text_positive_integer_multiple_digits_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 864015`2 world") == "hello world"
def test_both_text_positive_integer_multiple_digits_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 647078`653487 world") == "hello world"
def test_both_text_positive_integer_multiple_digits_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 222917 9 world") == "hello world"
def test_both_text_positive_integer_multiple_digits_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 564211 641276 world") == "hello world"
def test_both_text_positive_integer_multiple_digits_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 440821k8 world") == "hello world"
def test_both_text_positive_integer_multiple_digits_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 845780k860446 world") == "hello world"
def test_both_text_positive_integer_multiple_digits_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello 81289к1 world") == "hello world"
def test_both_text_positive_integer_multiple_digits_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello 146234к484167 world") == "hello world"
def test_both_text_negative_integer_single_digit_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -4 world") == "hello world"
def test_both_text_negative_integer_single_digit_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -0 world") == "hello world"
def test_both_text_negative_integer_single_digit_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -4'9 world") == "hello world"
def test_both_text_negative_integer_single_digit_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -5'387080 world") == "hello world"
def test_both_text_negative_integer_single_digit_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -3`8 world") == "hello world"
def test_both_text_negative_integer_single_digit_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -0`385330 world") == "hello world"
def test_both_text_negative_integer_single_digit_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -7 7 world") == "hello world"
def test_both_text_negative_integer_single_digit_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -1 245555 world") == "hello world"
def test_both_text_negative_integer_single_digit_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -4k4 world") == "hello world"
def test_both_text_negative_integer_single_digit_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -7k737481 world") == "hello world"
def test_both_text_negative_integer_single_digit_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -3к8 world") == "hello world"
def test_both_text_negative_integer_single_digit_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -4к979649 world") == "hello world"
def test_both_text_negative_integer_multiple_digits_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -579549 world") == "hello world"
def test_both_text_negative_integer_multiple_digits_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -521868 world") == "hello world"
def test_both_text_negative_integer_multiple_digits_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -494030'8 world") == "hello world"
def test_both_text_negative_integer_multiple_digits_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -997018'388418 world") == "hello world"
def test_both_text_negative_integer_multiple_digits_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -48935`6 world") == "hello world"
def test_both_text_negative_integer_multiple_digits_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -115491`848265 world") == "hello world"
def test_both_text_negative_integer_multiple_digits_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -373023 5 world") == "hello world"
def test_both_text_negative_integer_multiple_digits_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -526547 383697 world") == "hello world"
def test_both_text_negative_integer_multiple_digits_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -304461k5 world") == "hello world"
def test_both_text_negative_integer_multiple_digits_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -308120k521264 world") == "hello world"
def test_both_text_negative_integer_multiple_digits_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("hello -230268к9 world") == "hello world"
def test_both_text_negative_integer_multiple_digits_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("hello -695525к628100 world") == "hello world"
def test_inside_text_positive_integer_single_digit_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he4llo") == "he4llo"
def test_inside_text_positive_integer_single_digit_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he8llo") == "he8llo"
def test_inside_text_positive_integer_single_digit_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he0'8llo") == "he0'8llo"
def test_inside_text_positive_integer_single_digit_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he8'503290llo") == "he8'503290llo"
def test_inside_text_positive_integer_single_digit_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he3`3llo") == "he3`3llo"
def test_inside_text_positive_integer_single_digit_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he0`179192llo") == "he0`179192llo"
def test_inside_text_positive_integer_single_digit_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he2 4llo") == "he2 4llo"
def test_inside_text_positive_integer_single_digit_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he3 135087llo") == "he3 135087llo"
def test_inside_text_positive_integer_single_digit_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he8k4llo") == "he8k4llo"
def test_inside_text_positive_integer_single_digit_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he0k657610llo") == "he0k657610llo"
def test_inside_text_positive_integer_single_digit_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he9к2llo") == "he9к2llo"
def test_inside_text_positive_integer_single_digit_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he6к839529llo") == "he6к839529llo"
def test_inside_text_positive_integer_multiple_digits_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he513934llo") == "he513934llo"
def test_inside_text_positive_integer_multiple_digits_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he424141llo") == "he424141llo"
def test_inside_text_positive_integer_multiple_digits_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he757949'6llo") == "he757949'6llo"
def test_inside_text_positive_integer_multiple_digits_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he650035'989071llo") == "he650035'989071llo"
def test_inside_text_positive_integer_multiple_digits_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he849767`6llo") == "he849767`6llo"
def test_inside_text_positive_integer_multiple_digits_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he234327`915339llo") == "he234327`915339llo"
def test_inside_text_positive_integer_multiple_digits_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he703293 5llo") == "he703293 5llo"
def test_inside_text_positive_integer_multiple_digits_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he409856 70023llo") == "he409856 70023llo"
def test_inside_text_positive_integer_multiple_digits_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he744620k6llo") == "he744620k6llo"
def test_inside_text_positive_integer_multiple_digits_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he743290k231362llo") == "he743290k231362llo"
def test_inside_text_positive_integer_multiple_digits_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he791511к3llo") == "he791511к3llo"
def test_inside_text_positive_integer_multiple_digits_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he401092к788202llo") == "he401092к788202llo"
def test_inside_text_negative_integer_single_digit_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he-4llo") == "he-4llo"
def test_inside_text_negative_integer_single_digit_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he-8llo") == "he-8llo"
def test_inside_text_negative_integer_single_digit_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he-3'3llo") == "he-3'3llo"
def test_inside_text_negative_integer_single_digit_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he-4'290601llo") == "he-4'290601llo"
def test_inside_text_negative_integer_single_digit_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he-7`0llo") == "he-7`0llo"
def test_inside_text_negative_integer_single_digit_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he-6`707325llo") == "he-6`707325llo"
def test_inside_text_negative_integer_single_digit_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he-9 3llo") == "he-9 3llo"
def test_inside_text_negative_integer_single_digit_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he-0 183754llo") == "he-0 183754llo"
def test_inside_text_negative_integer_single_digit_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he-1k4llo") == "he-1k4llo"
def test_inside_text_negative_integer_single_digit_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he-3k878581llo") == "he-3k878581llo"
def test_inside_text_negative_integer_single_digit_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he-0к0llo") == "he-0к0llo"
def test_inside_text_negative_integer_single_digit_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he-6к377555llo") == "he-6к377555llo"
def test_inside_text_negative_integer_multiple_digits_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he-598986llo") == "he-598986llo"
def test_inside_text_negative_integer_multiple_digits_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he-393398llo") == "he-393398llo"
def test_inside_text_negative_integer_multiple_digits_quote_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he-890636'7llo") == "he-890636'7llo"
def test_inside_text_negative_integer_multiple_digits_quote_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he-834451'288314llo") == "he-834451'288314llo"
def test_inside_text_negative_integer_multiple_digits_apostrophe_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he-347856`8llo") == "he-347856`8llo"
def test_inside_text_negative_integer_multiple_digits_apostrophe_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he-504475`759252llo") == "he-504475`759252llo"
def test_inside_text_negative_integer_multiple_digits_space_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he-349749 9llo") == "he-349749 9llo"
def test_inside_text_negative_integer_multiple_digits_space_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he-184038 68144llo") == "he-184038 68144llo"
def test_inside_text_negative_integer_multiple_digits_eng_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he-289290k6llo") == "he-289290k6llo"
def test_inside_text_negative_integer_multiple_digits_eng_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he-964399k733553llo") == "he-964399k733553llo"
def test_inside_text_negative_integer_multiple_digits_rus_k_single_digit(setup_number_cleaner):
assert setup_number_cleaner("he-63989к5llo") == "he-63989к5llo"
def test_inside_text_negative_integer_multiple_digits_rus_k_multiple_digits(setup_number_cleaner):
assert setup_number_cleaner("he-403175к774771llo") == "he-403175к774771llo"
def test_positive_float_single_digit_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("2.9") == ""
def test_positive_float_single_digit_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("8.569333") == ""
def test_positive_float_single_digit_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("5,0") == ""
def test_positive_float_single_digit_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("1,780518") == ""
def test_positive_float_multiple_digits_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("785313.5") == ""
def test_positive_float_multiple_digits_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("537221.74655") == ""
def test_positive_float_multiple_digits_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("391240,8") == ""
def test_positive_float_multiple_digits_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("181004,460352") == ""
def test_negative_float_single_digit_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("-9.6") == ""
def test_negative_float_single_digit_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("-8.258030") == ""
def test_negative_float_single_digit_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("-7,1") == ""
def test_negative_float_single_digit_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("-0,885164") == ""
def test_negative_float_multiple_digits_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("-864605.4") == ""
def test_negative_float_multiple_digits_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("-355839.416791") == ""
def test_negative_float_multiple_digits_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("-578243,4") == ""
def test_negative_float_multiple_digits_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("-98767,817853") == ""
def test_left_text_positive_float_single_digit_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 4.6") == "hello "
def test_left_text_positive_float_single_digit_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 1.74914") == "hello "
def test_left_text_positive_float_single_digit_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 3,5") == "hello "
def test_left_text_positive_float_single_digit_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 2,8995") == "hello "
def test_left_text_positive_float_multiple_digits_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 128684.7") == "hello "
def test_left_text_positive_float_multiple_digits_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 832606.932249") == "hello "
def test_left_text_positive_float_multiple_digits_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 377802,4") == "hello "
def test_left_text_positive_float_multiple_digits_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 762367,135153") == "hello "
def test_left_text_negative_float_single_digit_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -1.8") == "hello "
def test_left_text_negative_float_single_digit_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -5.792708") == "hello "
def test_left_text_negative_float_single_digit_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -2,5") == "hello "
def test_left_text_negative_float_single_digit_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -5,888953") == "hello "
def test_left_text_negative_float_multiple_digits_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -486940.5") == "hello "
def test_left_text_negative_float_multiple_digits_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -716193.653169") == "hello "
def test_left_text_negative_float_multiple_digits_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -892150,7") == "hello "
def test_left_text_negative_float_multiple_digits_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -825361,420340") == "hello "
def test_right_text_positive_float_single_digit_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("9.7 hello") == " hello"
def test_right_text_positive_float_single_digit_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("8.668371 hello") == " hello"
def test_right_text_positive_float_single_digit_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("6,9 hello") == " hello"
def test_right_text_positive_float_single_digit_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("9,934089 hello") == " hello"
def test_right_text_positive_float_multiple_digits_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("243369.1 hello") == " hello"
def test_right_text_positive_float_multiple_digits_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("424756.17786 hello") == " hello"
def test_right_text_positive_float_multiple_digits_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("922173,3 hello") == " hello"
def test_right_text_positive_float_multiple_digits_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("829857,999977 hello") == " hello"
def test_right_text_negative_float_single_digit_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("-1.8 hello") == " hello"
def test_right_text_negative_float_single_digit_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("-5.743926 hello") == " hello"
def test_right_text_negative_float_single_digit_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("-1,9 hello") == " hello"
def test_right_text_negative_float_single_digit_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("-3,740022 hello") == " hello"
def test_right_text_negative_float_multiple_digits_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("-746442.5 hello") == " hello"
def test_right_text_negative_float_multiple_digits_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("-796358.785568 hello") == " hello"
def test_right_text_negative_float_multiple_digits_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("-162965,8 hello") == " hello"
def test_right_text_negative_float_multiple_digits_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("-510271,12306 hello") == " hello"
def test_both_text_positive_float_single_digit_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 2.6 world") == "hello world"
def test_both_text_positive_float_single_digit_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 6.756683 world") == "hello world"
def test_both_text_positive_float_single_digit_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 6,3 world") == "hello world"
def test_both_text_positive_float_single_digit_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 1,84108 world") == "hello world"
def test_both_text_positive_float_multiple_digits_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 430035.4 world") == "hello world"
def test_both_text_positive_float_multiple_digits_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 547739.554345 world") == "hello world"
def test_both_text_positive_float_multiple_digits_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 26171,1 world") == "hello world"
def test_both_text_positive_float_multiple_digits_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello 666557,952575 world") == "hello world"
def test_both_text_negative_float_single_digit_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -1.0 world") == "hello world"
def test_both_text_negative_float_single_digit_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -1.445504 world") == "hello world"
def test_both_text_negative_float_single_digit_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -7,7 world") == "hello world"
def test_both_text_negative_float_single_digit_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -3,87658 world") == "hello world"
def test_both_text_negative_float_multiple_digits_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -477476.4 world") == "hello world"
def test_both_text_negative_float_multiple_digits_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -541300.867811 world") == "hello world"
def test_both_text_negative_float_multiple_digits_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -708842,4 world") == "hello world"
def test_both_text_negative_float_multiple_digits_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("hello -741041,952275 world") == "hello world"
def test_inside_text_positive_float_single_digit_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("he4.9llo") == "he4.9llo"
def test_inside_text_positive_float_single_digit_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("he4.605648llo") == "he4.605648llo"
def test_inside_text_positive_float_single_digit_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("he7,6llo") == "he7,6llo"
def test_inside_text_positive_float_single_digit_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("he1,640808llo") == "he1,640808llo"
def test_inside_text_positive_float_multiple_digits_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("he311010.5llo") == "he311010.5llo"
def test_inside_text_positive_float_multiple_digits_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("he593407.960145llo") == "he593407.960145llo"
def test_inside_text_positive_float_multiple_digits_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("he318574,7llo") == "he318574,7llo"
def test_inside_text_positive_float_multiple_digits_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("he113354,321762llo") == "he113354,321762llo"
def test_inside_text_negative_float_single_digit_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("he-1.7llo") == "he-1.7llo"
def test_inside_text_negative_float_single_digit_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("he-5.347666llo") == "he-5.347666llo"
def test_inside_text_negative_float_single_digit_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("he-1,5llo") == "he-1,5llo"
def test_inside_text_negative_float_single_digit_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("he-0,785082llo") == "he-0,785082llo"
def test_inside_text_negative_float_multiple_digits_dot_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("he-19847.2llo") == "he-19847.2llo"
def test_inside_text_negative_float_multiple_digits_dot_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("he-163691.435539llo") == "he-163691.435539llo"
def test_inside_text_negative_float_multiple_digits_comma_with_single_digit_fraction(setup_number_cleaner):
assert setup_number_cleaner("he-416740,2llo") == "he-416740,2llo"
def test_inside_text_negative_float_multiple_digits_comma_with_multiple_digits_fraction(setup_number_cleaner):
assert setup_number_cleaner("he-117470,870470llo") == "he-117470,870470llo"
| 39.766615 | 110 | 0.837324 | 7,050 | 51,458 | 5.511064 | 0.05234 | 0.181479 | 0.296965 | 0.197668 | 0.922606 | 0.921628 | 0.91962 | 0.914344 | 0.903663 | 0.891283 | 0 | 0.052841 | 0.086459 | 51,458 | 1,293 | 111 | 39.79737 | 0.77366 | 0 | 0 | 0.006163 | 0 | 0 | 0.13067 | 0 | 0 | 0 | 0 | 0 | 0.493066 | 1 | 0.494607 | false | 0 | 0.003082 | 0 | 0.49923 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
9bda521421d444cb1361c910a70adc7a47fdcd9f | 80,233 | py | Python | sympy/integrals/rubi/rubi_tests/tests/test_hyperbolic_sine.py | Michal-Gagala/sympy | 3cc756c2af73b5506102abaeefd1b654e286e2c8 | ["MIT"] | null | null | null | sympy/integrals/rubi/rubi_tests/tests/test_hyperbolic_sine.py | Michal-Gagala/sympy | 3cc756c2af73b5506102abaeefd1b654e286e2c8 | ["MIT"] | null | null | null | sympy/integrals/rubi/rubi_tests/tests/test_hyperbolic_sine.py | Michal-Gagala/sympy | 3cc756c2af73b5506102abaeefd1b654e286e2c8 | ["MIT"] | null | null | null |
import sys
from sympy.external import import_module
matchpy = import_module("matchpy")
if not matchpy:
    # bin/test will not execute any tests now
disabled = True
if sys.version_info[:2] < (3, 6):
disabled = True
from sympy.integrals.rubi.utility_function import (
sympy_op_factory, Int, Sum, Set, With, Module, Scan, MapAnd, FalseQ,
ZeroQ, NegativeQ, NonzeroQ, FreeQ, NFreeQ, List, Log, PositiveQ,
PositiveIntegerQ, NegativeIntegerQ, IntegerQ, IntegersQ,
ComplexNumberQ, PureComplexNumberQ, RealNumericQ, PositiveOrZeroQ,
NegativeOrZeroQ, FractionOrNegativeQ, NegQ, Equal, Unequal, IntPart,
FracPart, RationalQ, ProductQ, SumQ, NonsumQ, Subst, First, Rest,
SqrtNumberQ, SqrtNumberSumQ, LinearQ, Sqrt, ArcCosh, Coefficient,
Denominator, Hypergeometric2F1, Not, Simplify, FractionalPart,
IntegerPart, AppellF1, EllipticPi, EllipticE, EllipticF, ArcTan,
ArcCot, ArcCoth, ArcTanh, ArcSin, ArcSinh, ArcCos, ArcCsc, ArcSec,
ArcCsch, ArcSech, Sinh, Tanh, Cosh, Sech, Csch, Coth, LessEqual, Less,
Greater, GreaterEqual, FractionQ, IntLinearcQ, Expand, IndependentQ,
PowerQ, IntegerPowerQ, PositiveIntegerPowerQ, FractionalPowerQ, AtomQ,
ExpQ, LogQ, Head, MemberQ, TrigQ, SinQ, CosQ, TanQ, CotQ, SecQ, CscQ,
Sin, Cos, Tan, Cot, Sec, Csc, HyperbolicQ, SinhQ, CoshQ, TanhQ, CothQ,
SechQ, CschQ, InverseTrigQ, SinCosQ, SinhCoshQ, LeafCount, Numerator,
NumberQ, NumericQ, Length, ListQ, Im, Re, InverseHyperbolicQ,
InverseFunctionQ, TrigHyperbolicFreeQ, InverseFunctionFreeQ, RealQ,
EqQ, FractionalPowerFreeQ, ComplexFreeQ, PolynomialQ, FactorSquareFree,
PowerOfLinearQ, Exponent, QuadraticQ, LinearPairQ, BinomialParts,
TrinomialParts, PolyQ, EvenQ, OddQ, PerfectSquareQ, NiceSqrtAuxQ,
NiceSqrtQ, Together, PosAux, PosQ, CoefficientList, ReplaceAll,
ExpandLinearProduct, GCD, ContentFactor, NumericFactor,
NonnumericFactors, MakeAssocList, GensymSubst, KernelSubst,
ExpandExpression, Apart, SmartApart, MatchQ,
PolynomialQuotientRemainder, FreeFactors, NonfreeFactors,
RemoveContentAux, RemoveContent, FreeTerms, NonfreeTerms,
ExpandAlgebraicFunction, CollectReciprocals, ExpandCleanup,
AlgebraicFunctionQ, Coeff, LeadTerm, RemainingTerms, LeadFactor,
RemainingFactors, LeadBase, LeadDegree, Numer, Denom, hypergeom, Expon,
MergeMonomials, PolynomialDivide, BinomialQ, TrinomialQ,
GeneralizedBinomialQ, GeneralizedTrinomialQ, FactorSquareFreeList,
PerfectPowerTest, SquareFreeFactorTest, RationalFunctionQ,
RationalFunctionFactors, NonrationalFunctionFactors, Reverse,
RationalFunctionExponents, RationalFunctionExpand, ExpandIntegrand,
SimplerQ, SimplerSqrtQ, SumSimplerQ, BinomialDegree, TrinomialDegree,
CancelCommonFactors, SimplerIntegrandQ, GeneralizedBinomialDegree,
GeneralizedBinomialParts, GeneralizedTrinomialDegree,
GeneralizedTrinomialParts, MonomialQ, MonomialSumQ,
MinimumMonomialExponent, MonomialExponent, LinearMatchQ,
PowerOfLinearMatchQ, QuadraticMatchQ, CubicMatchQ, BinomialMatchQ,
TrinomialMatchQ, GeneralizedBinomialMatchQ, GeneralizedTrinomialMatchQ,
QuotientOfLinearsMatchQ, PolynomialTermQ, PolynomialTerms,
NonpolynomialTerms, PseudoBinomialParts, NormalizePseudoBinomial,
PseudoBinomialPairQ, PseudoBinomialQ, PolynomialGCD, PolyGCD,
AlgebraicFunctionFactors, NonalgebraicFunctionFactors,
QuotientOfLinearsP, QuotientOfLinearsParts, QuotientOfLinearsQ,
Flatten, Sort, AbsurdNumberQ, AbsurdNumberFactors,
NonabsurdNumberFactors, SumSimplerAuxQ, Prepend, Drop,
CombineExponents, FactorInteger, FactorAbsurdNumber,
SubstForInverseFunction, SubstForFractionalPower,
SubstForFractionalPowerOfQuotientOfLinears,
FractionalPowerOfQuotientOfLinears, SubstForFractionalPowerQ,
SubstForFractionalPowerAuxQ, FractionalPowerOfSquareQ,
FractionalPowerSubexpressionQ, Apply, FactorNumericGcd,
MergeableFactorQ, MergeFactor, MergeFactors, TrigSimplifyQ,
TrigSimplify, TrigSimplifyRecur, Order, FactorOrder, Smallest,
OrderedQ, MinimumDegree, PositiveFactors, Sign, NonpositiveFactors,
PolynomialInAuxQ, PolynomialInQ, ExponentInAux, ExponentIn,
PolynomialInSubstAux, PolynomialInSubst, Distrib, DistributeDegree,
FunctionOfPower, DivideDegreesOfFactors, MonomialFactor, FullSimplify,
FunctionOfLinearSubst, FunctionOfLinear, NormalizeIntegrand,
NormalizeIntegrandAux, NormalizeIntegrandFactor,
NormalizeIntegrandFactorBase, NormalizeTogether,
NormalizeLeadTermSigns, AbsorbMinusSign, NormalizeSumFactors,
SignOfFactor, NormalizePowerOfLinear, SimplifyIntegrand, SimplifyTerm,
TogetherSimplify, SmartSimplify, SubstForExpn, ExpandToSum, UnifySum,
UnifyTerms, UnifyTerm, CalculusQ, FunctionOfInverseLinear,
PureFunctionOfSinhQ, PureFunctionOfTanhQ, PureFunctionOfCoshQ,
IntegerQuotientQ, OddQuotientQ, EvenQuotientQ, FindTrigFactor,
FunctionOfSinhQ, FunctionOfCoshQ, OddHyperbolicPowerQ, FunctionOfTanhQ,
FunctionOfTanhWeight, FunctionOfHyperbolicQ, SmartNumerator,
SmartDenominator, SubstForAux, ActivateTrig, ExpandTrig, TrigExpand,
SubstForTrig, SubstForHyperbolic, InertTrigFreeQ, LCM,
SubstForFractionalPowerOfLinear, FractionalPowerOfLinear,
InverseFunctionOfLinear, InertTrigQ, InertReciprocalQ, DeactivateTrig,
FixInertTrigFunction, DeactivateTrigAux, PowerOfInertTrigSumQ,
PiecewiseLinearQ, KnownTrigIntegrandQ, KnownSineIntegrandQ,
KnownTangentIntegrandQ, KnownCotangentIntegrandQ,
KnownSecantIntegrandQ, TryPureTanSubst, TryTanhSubst, TryPureTanhSubst,
AbsurdNumberGCD, AbsurdNumberGCDList, ExpandTrigExpand,
ExpandTrigReduce, ExpandTrigReduceAux, NormalizeTrig, TrigToExp,
ExpandTrigToExp, TrigReduce, FunctionOfTrig, AlgebraicTrigFunctionQ,
FunctionOfHyperbolic, FunctionOfQ, FunctionOfExpnQ, PureFunctionOfSinQ,
PureFunctionOfCosQ, PureFunctionOfTanQ, PureFunctionOfCotQ,
FunctionOfCosQ, FunctionOfSinQ, OddTrigPowerQ, FunctionOfTanQ,
FunctionOfTanWeight, FunctionOfTrigQ, FunctionOfDensePolynomialsQ,
FunctionOfLog, PowerVariableExpn, PowerVariableDegree,
PowerVariableSubst, EulerIntegrandQ, FunctionOfSquareRootOfQuadratic,
SquareRootOfQuadraticSubst, Divides, EasyDQ, ProductOfLinearPowersQ,
Rt, NthRoot, AtomBaseQ, SumBaseQ, NegSumBaseQ, AllNegTermQ,
SomeNegTermQ, TrigSquareQ, RtAux, TrigSquare, IntSum, IntTerm, Map2,
ConstantFactor, SameQ, ReplacePart, CommonFactors,
MostMainFactorPosition, FunctionOfExponentialQ, FunctionOfExponential,
FunctionOfExponentialFunction, FunctionOfExponentialFunctionAux,
FunctionOfExponentialTest, FunctionOfExponentialTestAux, stdev,
rubi_test, If, IntQuadraticQ, IntBinomialQ, RectifyTangent,
RectifyCotangent, Inequality, Condition, Simp, SimpHelp, SplitProduct,
SplitSum, SubstFor, SubstForAux, FresnelS, FresnelC, Erfc, Erfi, Gamma,
FunctionOfTrigOfLinearQ, ElementaryFunctionQ, Complex, UnsameQ,
_SimpFixFactor, SimpFixFactor, _FixSimplify, FixSimplify,
_SimplifyAntiderivativeSum, SimplifyAntiderivativeSum,
_SimplifyAntiderivative, SimplifyAntiderivative, _TrigSimplifyAux,
TrigSimplifyAux, Cancel, Part, PolyLog, D, Dist, Sum_doit, PolynomialQuotient, Floor,
PolynomialRemainder, Factor, PolyLog, CosIntegral, SinIntegral, LogIntegral, SinhIntegral,
    CoshIntegral, Rule, Erf, PolyGamma, ExpIntegralEi, ExpIntegralE, LogGamma, UtilityOperator, Factorial,
Zeta, ProductLog, DerivativeDivides, HypergeometricPFQ, IntHide, OneQ
)
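# The wholesale import above pulls in Rubi's Mathematica-style utility helpers so
# that identifiers appearing in the expected antiderivatives below (for example
# CoshIntegral, SinhIntegral, Erf, Erfi, PolyLog, Gamma, EllipticE) resolve in this
# module's namespace.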
from sympy.core.add import Add
from sympy.core.mod import Mod
from sympy.core.mul import Mul
from sympy.core.numbers import (Float, I, Integer)
from sympy.core.power import Pow
from sympy.core.singleton import S
from sympy.functions.elementary.complexes import Abs
from sympy.functions.elementary.miscellaneous import sqrt
from sympy.integrals.integrals import Integral as Integrate
from sympy.logic.boolalg import (And, Or)
from sympy.simplify.simplify import simplify
from sympy.integrals.rubi.symbol import WC
from sympy.core.symbol import symbols, Symbol
from sympy.functions import (sin, cos, tan, cot, csc, sec, sqrt, erf, exp, log)
from sympy.functions.elementary.hyperbolic import (acosh, asinh, atanh, acoth, acsch, asech, cosh, sinh, tanh, coth, sech, csch)
from sympy.functions.elementary.trigonometric import (atan, acsc, asin, acot, acos, asec)
from sympy.integrals.rubi.rubimain import rubi_integrate
from sympy.core.numbers import pi as Pi
a, b, c, d, e, f, m, n, x, u, k, p, r, s, t, i, j = symbols('a b c d e f m n x u k p r s t i j')
A, B, C, D, a, b, c, d, e, f, g, h, y, z, m, n, p, q, u, v, w, F = symbols('A B C D a b c d e f g h y z m n p q u v w F')
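# Added sanity-check sketch (not part of the original Rubi suite): any expected
# antiderivative can be verified independently by differentiating it and comparing
# with the integrand; shown here for the first assertion of test_1 below.
def test_first_antiderivative_by_differentiation():
    F = ((c + d*x)**S(4)*cosh(a + b*x)/b - S(4)*d*(c + d*x)**S(3)*sinh(a + b*x)/b**S(2)
         + S(12)*d**S(2)*(c + d*x)**S(2)*cosh(a + b*x)/b**S(3)
         - S(24)*d**S(3)*(c + d*x)*sinh(a + b*x)/b**S(4)
         + S(24)*d**S(4)*cosh(a + b*x)/b**S(5))
    assert simplify(F.diff(x) - (c + d*x)**S(4)*sinh(a + b*x)) == 0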
def test_1():
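    # Each assertion below compares the antiderivative returned by rubi_integrate
    # with a reference closed form for integrands built from powers of a linear
    # factor (c + d*x, d*x or x) combined with powers of sinh (and, further down,
    # a + I*a*sinh); the rubi_test flags request, as their names suggest, comparison
    # after expansion, after differentiation, and by numerical evaluation.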
assert rubi_test(rubi_integrate((c + d*x)**S(4)*sinh(a + b*x), x), x, (c + d*x)**S(4)*cosh(a + b*x)/b - S(4)*d*(c + d*x)**S(3)*sinh(a + b*x)/b**S(2) + S(12)*d**S(2)*(c + d*x)**S(2)*cosh(a + b*x)/b**S(3) - S(24)*d**S(3)*(c + d*x)*sinh(a + b*x)/b**S(4) + S(24)*d**S(4)*cosh(a + b*x)/b**S(5), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)*sinh(a + b*x), x), x, (c + d*x)**S(3)*cosh(a + b*x)/b - S(3)*d*(c + d*x)**S(2)*sinh(a + b*x)/b**S(2) + S(6)*d**S(2)*(c + d*x)*cosh(a + b*x)/b**S(3) - S(6)*d**S(3)*sinh(a + b*x)/b**S(4), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)*sinh(a + b*x), x), x, (c + d*x)**S(2)*cosh(a + b*x)/b - S(2)*d*(c + d*x)*sinh(a + b*x)/b**S(2) + S(2)*d**S(2)*cosh(a + b*x)/b**S(3), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)*sinh(a + b*x), x), x, (c + d*x)*cosh(a + b*x)/b - d*sinh(a + b*x)/b**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/(c + d*x), x), x, CoshIntegral(b*c/d + b*x)*sinh(a - b*c/d)/d + SinhIntegral(b*c/d + b*x)*cosh(a - b*c/d)/d, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/(c + d*x)**S(2), x), x, b*CoshIntegral(b*c/d + b*x)*cosh(a - b*c/d)/d**S(2) + b*SinhIntegral(b*c/d + b*x)*sinh(a - b*c/d)/d**S(2) - sinh(a + b*x)/(d*(c + d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/(c + d*x)**S(3), x), x, b**S(2)*CoshIntegral(b*c/d + b*x)*sinh(a - b*c/d)/(S(2)*d**S(3)) + b**S(2)*SinhIntegral(b*c/d + b*x)*cosh(a - b*c/d)/(S(2)*d**S(3)) - b*cosh(a + b*x)/(S(2)*d**S(2)*(c + d*x)) - sinh(a + b*x)/(S(2)*d*(c + d*x)**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(4)*sinh(a + b*x)**S(2), x), x, -(c + d*x)**S(5)/(S(10)*d) + (c + d*x)**S(4)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b) - d*(c + d*x)**S(3)*sinh(a + b*x)**S(2)/b**S(2) - d*(c + d*x)**S(3)/(S(2)*b**S(2)) + S(3)*d**S(2)*(c + d*x)**S(2)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b**S(3)) - S(3)*d**S(4)*x/(S(4)*b**S(4)) - S(3)*d**S(3)*(c + d*x)*sinh(a + b*x)**S(2)/(S(2)*b**S(4)) + S(3)*d**S(4)*sinh(a + b*x)*cosh(a + b*x)/(S(4)*b**S(5)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)*sinh(a + b*x)**S(2), x), x, -(c + d*x)**S(4)/(S(8)*d) + (c + d*x)**S(3)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b) - S(3)*c*d**S(2)*x/(S(4)*b**S(2)) - S(3)*d**S(3)*x**S(2)/(S(8)*b**S(2)) - S(3)*d*(c + d*x)**S(2)*sinh(a + b*x)**S(2)/(S(4)*b**S(2)) + S(3)*d**S(2)*(c + d*x)*sinh(a + b*x)*cosh(a + b*x)/(S(4)*b**S(3)) - S(3)*d**S(3)*sinh(a + b*x)**S(2)/(S(8)*b**S(4)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)*sinh(a + b*x)**S(2), x), x, -(c + d*x)**S(3)/(S(6)*d) + (c + d*x)**S(2)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b) - d**S(2)*x/(S(4)*b**S(2)) - d*(c + d*x)*sinh(a + b*x)**S(2)/(S(2)*b**S(2)) + d**S(2)*sinh(a + b*x)*cosh(a + b*x)/(S(4)*b**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)*sinh(a + b*x)**S(2), x), x, -c*x/S(2) - d*x**S(2)/S(4) + (c + d*x)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b) - d*sinh(a + b*x)**S(2)/(S(4)*b**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x), x), x, CoshIntegral(S(2)*b*c/d + S(2)*b*x)*cosh(S(2)*a - S(2)*b*c/d)/(S(2)*d) + SinhIntegral(S(2)*b*c/d + S(2)*b*x)*sinh(S(2)*a - S(2)*b*c/d)/(S(2)*d) - log(c + d*x)/(S(2)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**S(2), x), x, b*CoshIntegral(S(2)*b*c/d + S(2)*b*x)*sinh(S(2)*a - S(2)*b*c/d)/d**S(2) + b*SinhIntegral(S(2)*b*c/d + S(2)*b*x)*cosh(S(2)*a - S(2)*b*c/d)/d**S(2) - sinh(a + b*x)**S(2)/(d*(c + d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**S(3), x), x, b**S(2)*CoshIntegral(S(2)*b*c/d + S(2)*b*x)*cosh(S(2)*a - S(2)*b*c/d)/d**S(3) + b**S(2)*SinhIntegral(S(2)*b*c/d + S(2)*b*x)*sinh(S(2)*a - S(2)*b*c/d)/d**S(3) - b*sinh(a + b*x)*cosh(a + b*x)/(d**S(2)*(c + d*x)) - sinh(a + b*x)**S(2)/(S(2)*d*(c + d*x)**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**S(4), x), x, S(2)*b**S(3)*CoshIntegral(S(2)*b*c/d + S(2)*b*x)*sinh(S(2)*a - S(2)*b*c/d)/(S(3)*d**S(4)) + S(2)*b**S(3)*SinhIntegral(S(2)*b*c/d + S(2)*b*x)*cosh(S(2)*a - S(2)*b*c/d)/(S(3)*d**S(4)) - S(2)*b**S(2)*sinh(a + b*x)**S(2)/(S(3)*d**S(3)*(c + d*x)) - b**S(2)/(S(3)*d**S(3)*(c + d*x)) - b*sinh(a + b*x)*cosh(a + b*x)/(S(3)*d**S(2)*(c + d*x)**S(2)) - sinh(a + b*x)**S(2)/(S(3)*d*(c + d*x)**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(4)*sinh(a + b*x)**S(3), x), x, (c + d*x)**S(4)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*(c + d*x)**S(4)*cosh(a + b*x)/(S(3)*b) - S(4)*d*(c + d*x)**S(3)*sinh(a + b*x)**S(3)/(S(9)*b**S(2)) + S(8)*d*(c + d*x)**S(3)*sinh(a + b*x)/(S(3)*b**S(2)) + S(4)*d**S(2)*(c + d*x)**S(2)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(9)*b**S(3)) - S(80)*d**S(2)*(c + d*x)**S(2)*cosh(a + b*x)/(S(9)*b**S(3)) - S(8)*d**S(3)*(c + d*x)*sinh(a + b*x)**S(3)/(S(27)*b**S(4)) + S(160)*d**S(3)*(c + d*x)*sinh(a + b*x)/(S(9)*b**S(4)) + S(8)*d**S(4)*cosh(a + b*x)**S(3)/(S(81)*b**S(5)) - S(488)*d**S(4)*cosh(a + b*x)/(S(27)*b**S(5)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)*sinh(a + b*x)**S(3), x), x, (c + d*x)**S(3)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*(c + d*x)**S(3)*cosh(a + b*x)/(S(3)*b) - d*(c + d*x)**S(2)*sinh(a + b*x)**S(3)/(S(3)*b**S(2)) + S(2)*d*(c + d*x)**S(2)*sinh(a + b*x)/b**S(2) + S(2)*d**S(2)*(c + d*x)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(9)*b**S(3)) - S(40)*d**S(2)*(c + d*x)*cosh(a + b*x)/(S(9)*b**S(3)) - S(2)*d**S(3)*sinh(a + b*x)**S(3)/(S(27)*b**S(4)) + S(40)*d**S(3)*sinh(a + b*x)/(S(9)*b**S(4)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)*sinh(a + b*x)**S(3), x), x, (c + d*x)**S(2)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*(c + d*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*d*(c + d*x)*sinh(a + b*x)**S(3)/(S(9)*b**S(2)) + S(4)*d*(c + d*x)*sinh(a + b*x)/(S(3)*b**S(2)) + S(2)*d**S(2)*cosh(a + b*x)**S(3)/(S(27)*b**S(3)) - S(14)*d**S(2)*cosh(a + b*x)/(S(9)*b**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)*sinh(a + b*x)**S(3), x), x, (c + d*x)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*(c + d*x)*cosh(a + b*x)/(S(3)*b) - d*sinh(a + b*x)**S(3)/(S(9)*b**S(2)) + S(2)*d*sinh(a + b*x)/(S(3)*b**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/(c + d*x), x), x, -S(3)*CoshIntegral(b*c/d + b*x)*sinh(a - b*c/d)/(S(4)*d) + CoshIntegral(S(3)*b*c/d + S(3)*b*x)*sinh(S(3)*a - S(3)*b*c/d)/(S(4)*d) - S(3)*SinhIntegral(b*c/d + b*x)*cosh(a - b*c/d)/(S(4)*d) + SinhIntegral(S(3)*b*c/d + S(3)*b*x)*cosh(S(3)*a - S(3)*b*c/d)/(S(4)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/(c + d*x)**S(2), x), x, -S(3)*b*CoshIntegral(b*c/d + b*x)*cosh(a - b*c/d)/(S(4)*d**S(2)) + S(3)*b*CoshIntegral(S(3)*b*c/d + S(3)*b*x)*cosh(S(3)*a - S(3)*b*c/d)/(S(4)*d**S(2)) - S(3)*b*SinhIntegral(b*c/d + b*x)*sinh(a - b*c/d)/(S(4)*d**S(2)) + S(3)*b*SinhIntegral(S(3)*b*c/d + S(3)*b*x)*sinh(S(3)*a - S(3)*b*c/d)/(S(4)*d**S(2)) - sinh(a + b*x)**S(3)/(d*(c + d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/(c + d*x)**S(3), x), x, -S(3)*b**S(2)*CoshIntegral(b*c/d + b*x)*sinh(a - b*c/d)/(S(8)*d**S(3)) + S(9)*b**S(2)*CoshIntegral(S(3)*b*c/d + S(3)*b*x)*sinh(S(3)*a - S(3)*b*c/d)/(S(8)*d**S(3)) - S(3)*b**S(2)*SinhIntegral(b*c/d + b*x)*cosh(a - b*c/d)/(S(8)*d**S(3)) + S(9)*b**S(2)*SinhIntegral(S(3)*b*c/d + S(3)*b*x)*cosh(S(3)*a - S(3)*b*c/d)/(S(8)*d**S(3)) - S(3)*b*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(2)*d**S(2)*(c + d*x)) - sinh(a + b*x)**S(3)/(S(2)*d*(c + d*x)**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/sinh(a + b*x), x), x, -S(2)*(c + d*x)**S(3)*atanh(exp(a + b*x))/b - S(3)*d*(c + d*x)**S(2)*PolyLog(S(2), -exp(a + b*x))/b**S(2) + S(3)*d*(c + d*x)**S(2)*PolyLog(S(2), exp(a + b*x))/b**S(2) + S(6)*d**S(2)*(c + d*x)*PolyLog(S(3), -exp(a + b*x))/b**S(3) - S(6)*d**S(2)*(c + d*x)*PolyLog(S(3), exp(a + b*x))/b**S(3) - S(6)*d**S(3)*PolyLog(S(4), -exp(a + b*x))/b**S(4) + S(6)*d**S(3)*PolyLog(S(4), exp(a + b*x))/b**S(4), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/sinh(a + b*x), x), x, -S(2)*(c + d*x)**S(2)*atanh(exp(a + b*x))/b - S(2)*d*(c + d*x)*PolyLog(S(2), -exp(a + b*x))/b**S(2) + S(2)*d*(c + d*x)*PolyLog(S(2), exp(a + b*x))/b**S(2) + S(2)*d**S(2)*PolyLog(S(3), -exp(a + b*x))/b**S(3) - S(2)*d**S(2)*PolyLog(S(3), exp(a + b*x))/b**S(3), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/sinh(a + b*x), x), x, -S(2)*(c + d*x)*atanh(exp(a + b*x))/b - d*PolyLog(S(2), -exp(a + b*x))/b**S(2) + d*PolyLog(S(2), exp(a + b*x))/b**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)*sinh(a + b*x)), x), x, Integrate(S(1)/((c + d*x)*sinh(a + b*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)**S(2)*sinh(a + b*x)), x), x, Integrate(S(1)/((c + d*x)**S(2)*sinh(a + b*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/sinh(a + b*x)**S(2), x), x, -(c + d*x)**S(3)/b - (c + d*x)**S(3)/(b*tanh(a + b*x)) + S(3)*d*(c + d*x)**S(2)*log(-exp(S(2)*a + S(2)*b*x) + S(1))/b**S(2) + S(3)*d**S(2)*(c + d*x)*PolyLog(S(2), exp(S(2)*a + S(2)*b*x))/b**S(3) - S(3)*d**S(3)*PolyLog(S(3), exp(S(2)*a + S(2)*b*x))/(S(2)*b**S(4)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/sinh(a + b*x)**S(2), x), x, -(c + d*x)**S(2)/b - (c + d*x)**S(2)/(b*tanh(a + b*x)) + S(2)*d*(c + d*x)*log(-exp(S(2)*a + S(2)*b*x) + S(1))/b**S(2) + d**S(2)*PolyLog(S(2), exp(S(2)*a + S(2)*b*x))/b**S(3), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/sinh(a + b*x)**S(2), x), x, -(c + d*x)/(b*tanh(a + b*x)) + d*log(sinh(a + b*x))/b**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)*sinh(a + b*x)**S(2)), x), x, Integrate(S(1)/((c + d*x)*sinh(a + b*x)**S(2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)**S(2)*sinh(a + b*x)**S(2)), x), x, Integrate(S(1)/((c + d*x)**S(2)*sinh(a + b*x)**S(2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/sinh(a + b*x)**S(3), x), x, (c + d*x)**S(3)*atanh(exp(a + b*x))/b - (c + d*x)**S(3)/(S(2)*b*sinh(a + b*x)*tanh(a + b*x)) + S(3)*d*(c + d*x)**S(2)*PolyLog(S(2), -exp(a + b*x))/(S(2)*b**S(2)) - S(3)*d*(c + d*x)**S(2)*PolyLog(S(2), exp(a + b*x))/(S(2)*b**S(2)) - S(3)*d*(c + d*x)**S(2)/(S(2)*b**S(2)*sinh(a + b*x)) - S(3)*d**S(2)*(c + d*x)*PolyLog(S(3), -exp(a + b*x))/b**S(3) + S(3)*d**S(2)*(c + d*x)*PolyLog(S(3), exp(a + b*x))/b**S(3) - S(6)*d**S(2)*(c + d*x)*atanh(exp(a + b*x))/b**S(3) - S(3)*d**S(3)*PolyLog(S(2), -exp(a + b*x))/b**S(4) + S(3)*d**S(3)*PolyLog(S(2), exp(a + b*x))/b**S(4) + S(3)*d**S(3)*PolyLog(S(4), -exp(a + b*x))/b**S(4) - S(3)*d**S(3)*PolyLog(S(4), exp(a + b*x))/b**S(4), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/sinh(a + b*x)**S(3), x), x, (c + d*x)**S(2)*atanh(exp(a + b*x))/b - (c + d*x)**S(2)/(S(2)*b*sinh(a + b*x)*tanh(a + b*x)) + d*(c + d*x)*PolyLog(S(2), -exp(a + b*x))/b**S(2) - d*(c + d*x)*PolyLog(S(2), exp(a + b*x))/b**S(2) - d*(c + d*x)/(b**S(2)*sinh(a + b*x)) - d**S(2)*PolyLog(S(3), -exp(a + b*x))/b**S(3) + d**S(2)*PolyLog(S(3), exp(a + b*x))/b**S(3) - d**S(2)*atanh(cosh(a + b*x))/b**S(3), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/sinh(a + b*x)**S(3), x), x, (c + d*x)*atanh(exp(a + b*x))/b - (c + d*x)/(S(2)*b*sinh(a + b*x)*tanh(a + b*x)) + d*PolyLog(S(2), -exp(a + b*x))/(S(2)*b**S(2)) - d*PolyLog(S(2), exp(a + b*x))/(S(2)*b**S(2)) - d/(S(2)*b**S(2)*sinh(a + b*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)*sinh(a + b*x)**S(3)), x), x, Integrate(S(1)/((c + d*x)*sinh(a + b*x)**S(3)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)**S(2)*sinh(a + b*x)**S(3)), x), x, Integrate(S(1)/((c + d*x)**S(2)*sinh(a + b*x)**S(3)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**(S(5)/2)*sinh(a + b*x), x), x, -S(15)*sqrt(Pi)*d**(S(5)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(16)*b**(S(7)/2)) - S(15)*sqrt(Pi)*d**(S(5)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(16)*b**(S(7)/2)) + (c + d*x)**(S(5)/2)*cosh(a + b*x)/b - S(5)*d*(c + d*x)**(S(3)/2)*sinh(a + b*x)/(S(2)*b**S(2)) + S(15)*d**S(2)*sqrt(c + d*x)*cosh(a + b*x)/(S(4)*b**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**(S(3)/2)*sinh(a + b*x), x), x, -S(3)*sqrt(Pi)*d**(S(3)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(8)*b**(S(5)/2)) + S(3)*sqrt(Pi)*d**(S(3)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(8)*b**(S(5)/2)) + (c + d*x)**(S(3)/2)*cosh(a + b*x)/b - S(3)*d*sqrt(c + d*x)*sinh(a + b*x)/(S(2)*b**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(c + d*x)*sinh(a + b*x), x), x, -sqrt(Pi)*sqrt(d)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(4)*b**(S(3)/2)) - sqrt(Pi)*sqrt(d)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(4)*b**(S(3)/2)) + sqrt(c + d*x)*cosh(a + b*x)/b, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/sqrt(c + d*x), x), x, -sqrt(Pi)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(2)*sqrt(b)*sqrt(d)) + sqrt(Pi)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(2)*sqrt(b)*sqrt(d)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/(c + d*x)**(S(3)/2), x), x, sqrt(Pi)*sqrt(b)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/d**(S(3)/2) + sqrt(Pi)*sqrt(b)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/d**(S(3)/2) - S(2)*sinh(a + b*x)/(d*sqrt(c + d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/(c + d*x)**(S(5)/2), x), x, -S(2)*sqrt(Pi)*b**(S(3)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(3)*d**(S(5)/2)) + S(2)*sqrt(Pi)*b**(S(3)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(3)*d**(S(5)/2)) - S(4)*b*cosh(a + b*x)/(S(3)*d**S(2)*sqrt(c + d*x)) - S(2)*sinh(a + b*x)/(S(3)*d*(c + d*x)**(S(3)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/(c + d*x)**(S(7)/2), x), x, S(4)*sqrt(Pi)*b**(S(5)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(15)*d**(S(7)/2)) + S(4)*sqrt(Pi)*b**(S(5)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(15)*d**(S(7)/2)) - S(8)*b**S(2)*sinh(a + b*x)/(S(15)*d**S(3)*sqrt(c + d*x)) - S(4)*b*cosh(a + b*x)/(S(15)*d**S(2)*(c + d*x)**(S(3)/2)) - S(2)*sinh(a + b*x)/(S(5)*d*(c + d*x)**(S(5)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**(S(5)/2)*sinh(a + b*x)**S(2), x), x, S(15)*sqrt(S(2))*sqrt(Pi)*d**(S(5)/2)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(512)*b**(S(7)/2)) - S(15)*sqrt(S(2))*sqrt(Pi)*d**(S(5)/2)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(512)*b**(S(7)/2)) - (c + d*x)**(S(7)/2)/(S(7)*d) + (c + d*x)**(S(5)/2)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b) - S(5)*d*(c + d*x)**(S(3)/2)*sinh(a + b*x)**S(2)/(S(8)*b**S(2)) - S(5)*d*(c + d*x)**(S(3)/2)/(S(16)*b**S(2)) + S(15)*d**S(2)*sqrt(c + d*x)*sinh(S(2)*a + S(2)*b*x)/(S(64)*b**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**(S(3)/2)*sinh(a + b*x)**S(2), x), x, S(3)*sqrt(S(2))*sqrt(Pi)*d**(S(3)/2)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(128)*b**(S(5)/2)) + S(3)*sqrt(S(2))*sqrt(Pi)*d**(S(3)/2)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(128)*b**(S(5)/2)) - (c + d*x)**(S(5)/2)/(S(5)*d) + (c + d*x)**(S(3)/2)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b) - S(3)*d*sqrt(c + d*x)*sinh(a + b*x)**S(2)/(S(8)*b**S(2)) - S(3)*d*sqrt(c + d*x)/(S(16)*b**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(c + d*x)*sinh(a + b*x)**S(2), x), x, sqrt(S(2))*sqrt(Pi)*sqrt(d)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(32)*b**(S(3)/2)) - sqrt(S(2))*sqrt(Pi)*sqrt(d)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(32)*b**(S(3)/2)) - (c + d*x)**(S(3)/2)/(S(3)*d) + sqrt(c + d*x)*sinh(S(2)*a + S(2)*b*x)/(S(4)*b), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/sqrt(c + d*x), x), x, sqrt(S(2))*sqrt(Pi)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(8)*sqrt(b)*sqrt(d)) + sqrt(S(2))*sqrt(Pi)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(8)*sqrt(b)*sqrt(d)) - sqrt(c + d*x)/d, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**(S(3)/2), x), x, -sqrt(S(2))*sqrt(Pi)*sqrt(b)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(2)*d**(S(3)/2)) + sqrt(S(2))*sqrt(Pi)*sqrt(b)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(2)*d**(S(3)/2)) - S(2)*sinh(a + b*x)**S(2)/(d*sqrt(c + d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**(S(5)/2), x), x, S(2)*sqrt(S(2))*sqrt(Pi)*b**(S(3)/2)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(3)*d**(S(5)/2)) + S(2)*sqrt(S(2))*sqrt(Pi)*b**(S(3)/2)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(3)*d**(S(5)/2)) - S(8)*b*sinh(a + b*x)*cosh(a + b*x)/(S(3)*d**S(2)*sqrt(c + d*x)) - S(2)*sinh(a + b*x)**S(2)/(S(3)*d*(c + d*x)**(S(3)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**(S(7)/2), x), x, -S(8)*sqrt(S(2))*sqrt(Pi)*b**(S(5)/2)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(15)*d**(S(7)/2)) + S(8)*sqrt(S(2))*sqrt(Pi)*b**(S(5)/2)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(15)*d**(S(7)/2)) - S(32)*b**S(2)*sinh(a + b*x)**S(2)/(S(15)*d**S(3)*sqrt(c + d*x)) - S(16)*b**S(2)/(S(15)*d**S(3)*sqrt(c + d*x)) - S(8)*b*sinh(a + b*x)*cosh(a + b*x)/(S(15)*d**S(2)*(c + d*x)**(S(3)/2)) - S(2)*sinh(a + b*x)**S(2)/(S(5)*d*(c + d*x)**(S(5)/2)), expand=True, _diff=True, _numerical=True)
# taking long time assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**(S(9)/2), x), x, S(32)*sqrt(S(2))*sqrt(Pi)*b**(S(7)/2)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(105)*d**(S(9)/2)) + S(32)*sqrt(S(2))*sqrt(Pi)*b**(S(7)/2)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(105)*d**(S(9)/2)) - S(128)*b**S(3)*sinh(a + b*x)*cosh(a + b*x)/(S(105)*d**S(4)*sqrt(c + d*x)) - S(32)*b**S(2)*sinh(a + b*x)**S(2)/(S(105)*d**S(3)*(c + d*x)**(S(3)/2)) - S(16)*b**S(2)/(S(105)*d**S(3)*(c + d*x)**(S(3)/2)) - S(8)*b*sinh(a + b*x)*cosh(a + b*x)/(S(35)*d**S(2)*(c + d*x)**(S(5)/2)) - S(2)*sinh(a + b*x)**S(2)/(S(7)*d*(c + d*x)**(S(7)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**(S(5)/2)*sinh(a + b*x)**S(3), x), x, S(45)*sqrt(Pi)*d**(S(5)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(64)*b**(S(7)/2)) - S(5)*sqrt(S(3))*sqrt(Pi)*d**(S(5)/2)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(1728)*b**(S(7)/2)) + S(45)*sqrt(Pi)*d**(S(5)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(64)*b**(S(7)/2)) - S(5)*sqrt(S(3))*sqrt(Pi)*d**(S(5)/2)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(1728)*b**(S(7)/2)) + (c + d*x)**(S(5)/2)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*(c + d*x)**(S(5)/2)*cosh(a + b*x)/(S(3)*b) - S(5)*d*(c + d*x)**(S(3)/2)*sinh(a + b*x)**S(3)/(S(18)*b**S(2)) + S(5)*d*(c + d*x)**(S(3)/2)*sinh(a + b*x)/(S(3)*b**S(2)) - S(45)*d**S(2)*sqrt(c + d*x)*cosh(a + b*x)/(S(16)*b**S(3)) + S(5)*d**S(2)*sqrt(c + d*x)*cosh(S(3)*a + S(3)*b*x)/(S(144)*b**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**(S(3)/2)*sinh(a + b*x)**S(3), x), x, S(9)*sqrt(Pi)*d**(S(3)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(32)*b**(S(5)/2)) - sqrt(S(3))*sqrt(Pi)*d**(S(3)/2)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(288)*b**(S(5)/2)) - S(9)*sqrt(Pi)*d**(S(3)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(32)*b**(S(5)/2)) + sqrt(S(3))*sqrt(Pi)*d**(S(3)/2)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(288)*b**(S(5)/2)) + (c + d*x)**(S(3)/2)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*(c + d*x)**(S(3)/2)*cosh(a + b*x)/(S(3)*b) - d*sqrt(c + d*x)*sinh(a + b*x)**S(3)/(S(6)*b**S(2)) + d*sqrt(c + d*x)*sinh(a + b*x)/b**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(c + d*x)*sinh(a + b*x)**S(3), x), x, S(3)*sqrt(Pi)*sqrt(d)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(16)*b**(S(3)/2)) - sqrt(S(3))*sqrt(Pi)*sqrt(d)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(144)*b**(S(3)/2)) + S(3)*sqrt(Pi)*sqrt(d)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(16)*b**(S(3)/2)) - sqrt(S(3))*sqrt(Pi)*sqrt(d)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(144)*b**(S(3)/2)) - S(3)*sqrt(c + d*x)*cosh(a + b*x)/(S(4)*b) + sqrt(c + d*x)*cosh(S(3)*a + S(3)*b*x)/(S(12)*b), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/sqrt(c + d*x), x), x, S(3)*sqrt(Pi)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(8)*sqrt(b)*sqrt(d)) - sqrt(S(3))*sqrt(Pi)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(24)*sqrt(b)*sqrt(d)) - S(3)*sqrt(Pi)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(8)*sqrt(b)*sqrt(d)) + sqrt(S(3))*sqrt(Pi)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(24)*sqrt(b)*sqrt(d)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/(c + d*x)**(S(3)/2), x), x, -S(3)*sqrt(Pi)*sqrt(b)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(4)*d**(S(3)/2)) + sqrt(S(3))*sqrt(Pi)*sqrt(b)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(4)*d**(S(3)/2)) - S(3)*sqrt(Pi)*sqrt(b)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(4)*d**(S(3)/2)) + sqrt(S(3))*sqrt(Pi)*sqrt(b)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(4)*d**(S(3)/2)) - S(2)*sinh(a + b*x)**S(3)/(d*sqrt(c + d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/(c + d*x)**(S(5)/2), x), x, sqrt(Pi)*b**(S(3)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(2)*d**(S(5)/2)) - sqrt(S(3))*sqrt(Pi)*b**(S(3)/2)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(2)*d**(S(5)/2)) - sqrt(Pi)*b**(S(3)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(2)*d**(S(5)/2)) + sqrt(S(3))*sqrt(Pi)*b**(S(3)/2)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(2)*d**(S(5)/2)) - S(4)*b*sinh(a + b*x)**S(2)*cosh(a + b*x)/(d**S(2)*sqrt(c + d*x)) - S(2)*sinh(a + b*x)**S(3)/(S(3)*d*(c + d*x)**(S(3)/2)), expand=True, _diff=True, _numerical=True)
# long time assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/(c + d*x)**(S(7)/2), x), x, -sqrt(Pi)*b**(S(5)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(5)*d**(S(7)/2)) + S(3)*sqrt(S(3))*sqrt(Pi)*b**(S(5)/2)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(5)*d**(S(7)/2)) - sqrt(Pi)*b**(S(5)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(5)*d**(S(7)/2)) + S(3)*sqrt(S(3))*sqrt(Pi)*b**(S(5)/2)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(5)*d**(S(7)/2)) - S(24)*b**S(2)*sinh(a + b*x)**S(3)/(S(5)*d**S(3)*sqrt(c + d*x)) - S(16)*b**S(2)*sinh(a + b*x)/(S(5)*d**S(3)*sqrt(c + d*x)) - S(4)*b*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(5)*d**S(2)*(c + d*x)**(S(3)/2)) - S(2)*sinh(a + b*x)**S(3)/(S(5)*d*(c + d*x)**(S(5)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((d*x)**(S(3)/2)*sinh(f*x), x), x, -S(3)*sqrt(Pi)*d**(S(3)/2)*Erf(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(8)*f**(S(5)/2)) + S(3)*sqrt(Pi)*d**(S(3)/2)*Erfi(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(8)*f**(S(5)/2)) - S(3)*d*sqrt(d*x)*sinh(f*x)/(S(2)*f**S(2)) + (d*x)**(S(3)/2)*cosh(f*x)/f, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(d*x)*sinh(f*x), x), x, -sqrt(Pi)*sqrt(d)*Erf(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(4)*f**(S(3)/2)) - sqrt(Pi)*sqrt(d)*Erfi(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(4)*f**(S(3)/2)) + sqrt(d*x)*cosh(f*x)/f, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(f*x)/sqrt(d*x), x), x, -sqrt(Pi)*Erf(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(2)*sqrt(d)*sqrt(f)) + sqrt(Pi)*Erfi(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(2)*sqrt(d)*sqrt(f)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(f*x)/(d*x)**(S(3)/2), x), x, sqrt(Pi)*sqrt(f)*Erf(sqrt(f)*sqrt(d*x)/sqrt(d))/d**(S(3)/2) + sqrt(Pi)*sqrt(f)*Erfi(sqrt(f)*sqrt(d*x)/sqrt(d))/d**(S(3)/2) - S(2)*sinh(f*x)/(d*sqrt(d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(f*x)/(d*x)**(S(5)/2), x), x, -S(2)*sqrt(Pi)*f**(S(3)/2)*Erf(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(3)*d**(S(5)/2)) + S(2)*sqrt(Pi)*f**(S(3)/2)*Erfi(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(3)*d**(S(5)/2)) - S(2)*sinh(f*x)/(S(3)*d*(d*x)**(S(3)/2)) - S(4)*f*cosh(f*x)/(S(3)*d**S(2)*sqrt(d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(c + d*x)/sinh(a + b*x), x), x, Integrate(sqrt(c + d*x)/sinh(a + b*x), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/(sqrt(c + d*x)*sinh(a + b*x)), x), x, Integrate(S(1)/(sqrt(c + d*x)*sinh(a + b*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(x)**(S(3)/2)/x**S(3), x), x, S(3)*Integrate(S(1)/(x*sqrt(sinh(x))), x)/S(8) + S(9)*Integrate(sinh(x)**(S(3)/2)/x, x)/S(8) - S(3)*sqrt(sinh(x))*cosh(x)/(S(4)*x) - sinh(x)**(S(3)/2)/(S(2)*x**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(-x*sqrt(sinh(x)) + x/sinh(x)**(S(3)/2), x), x, -S(2)*x*cosh(x)/sqrt(sinh(x)) + S(4)*sqrt(sinh(x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x/(S(3)*sqrt(sinh(x))) + x/sinh(x)**(S(5)/2), x), x, -S(2)*x*cosh(x)/(S(3)*sinh(x)**(S(3)/2)) - S(4)/(S(3)*sqrt(sinh(x))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(3)*x*sqrt(sinh(x))/S(5) + x/sinh(x)**(S(7)/2), x), x, S(6)*x*cosh(x)/(S(5)*sqrt(sinh(x))) - S(2)*x*cosh(x)/(S(5)*sinh(x)**(S(5)/2)) - S(12)*sqrt(sinh(x))/S(5) - S(4)/(S(15)*sinh(x)**(S(3)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(-x**S(2)*sqrt(sinh(x)) + x**S(2)/sinh(x)**(S(3)/2), x), x, -S(2)*x**S(2)*cosh(x)/sqrt(sinh(x)) + S(8)*x*sqrt(sinh(x)) - S(16)*I*EllipticE(Pi/S(4) - I*x/S(2), S(2))*sqrt(sinh(x))/sqrt(I*sinh(x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((b*sinh(e + f*x))**n*(c + d*x)**m, x), x, Integrate((b*sinh(e + f*x))**n*(c + d*x)**m, x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*sinh(a + b*x)**S(3), x), x, S(3)**(-m + S(-1))*(-b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(3)*b*(c + d*x)/d)*exp(S(3)*a - S(3)*b*c/d)/(S(8)*b) + S(3)**(-m + S(-1))*(b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(3)*b*(c + d*x)/d)*exp(-S(3)*a + S(3)*b*c/d)/(S(8)*b) - S(3)*(-b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -b*(c + d*x)/d)*exp(a - b*c/d)/(S(8)*b) - S(3)*(b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), b*(c + d*x)/d)*exp(-a + b*c/d)/(S(8)*b), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*sinh(a + b*x)**S(2), x), x, S(2)**(-m + S(-3))*(-b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(2)*b*(c + d*x)/d)*exp(S(2)*a - S(2)*b*c/d)/b - S(2)**(-m + S(-3))*(b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(2)*b*(c + d*x)/d)*exp(-S(2)*a + S(2)*b*c/d)/b - (c + d*x)**(m + S(1))/(S(2)*d*(m + S(1))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*sinh(a + b*x), x), x, (-b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -b*(c + d*x)/d)*exp(a - b*c/d)/(S(2)*b) + (b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), b*(c + d*x)/d)*exp(-a + b*c/d)/(S(2)*b), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m/sinh(a + b*x), x), x, Integrate((c + d*x)**m/sinh(a + b*x), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m/sinh(a + b*x)**S(2), x), x, Integrate((c + d*x)**m/sinh(a + b*x)**S(2), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(3))*sinh(a + b*x), x), x, -x**m*(-b*x)**(-m)*Gamma(m + S(4), -b*x)*exp(a)/(S(2)*b**S(4)) + x**m*(b*x)**(-m)*Gamma(m + S(4), b*x)*exp(-a)/(S(2)*b**S(4)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(2))*sinh(a + b*x), x), x, x**m*(-b*x)**(-m)*Gamma(m + S(3), -b*x)*exp(a)/(S(2)*b**S(3)) + x**m*(b*x)**(-m)*Gamma(m + S(3), b*x)*exp(-a)/(S(2)*b**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(1))*sinh(a + b*x), x), x, -x**m*(-b*x)**(-m)*Gamma(m + S(2), -b*x)*exp(a)/(S(2)*b**S(2)) + x**m*(b*x)**(-m)*Gamma(m + S(2), b*x)*exp(-a)/(S(2)*b**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**m*sinh(a + b*x), x), x, x**m*(-b*x)**(-m)*Gamma(m + S(1), -b*x)*exp(a)/(S(2)*b) + x**m*(b*x)**(-m)*Gamma(m + S(1), b*x)*exp(-a)/(S(2)*b), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(-1))*sinh(a + b*x), x), x, -x**m*(-b*x)**(-m)*Gamma(m, -b*x)*exp(a)/S(2) + x**m*(b*x)**(-m)*Gamma(m, b*x)*exp(-a)/S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(-2))*sinh(a + b*x), x), x, b*x**m*(-b*x)**(-m)*Gamma(m + S(-1), -b*x)*exp(a)/S(2) + b*x**m*(b*x)**(-m)*Gamma(m + S(-1), b*x)*exp(-a)/S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(-3))*sinh(a + b*x), x), x, -b**S(2)*x**m*(-b*x)**(-m)*Gamma(m + S(-2), -b*x)*exp(a)/S(2) + b**S(2)*x**m*(b*x)**(-m)*Gamma(m + S(-2), b*x)*exp(-a)/S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(3))*sinh(a + b*x)**S(2), x), x, -S(2)**(-m + S(-6))*x**m*(-b*x)**(-m)*Gamma(m + S(4), -S(2)*b*x)*exp(S(2)*a)/b**S(4) - S(2)**(-m + S(-6))*x**m*(b*x)**(-m)*Gamma(m + S(4), S(2)*b*x)*exp(-S(2)*a)/b**S(4) - x**(m + S(4))/(S(2)*m + S(8)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(2))*sinh(a + b*x)**S(2), x), x, S(2)**(-m + S(-5))*x**m*(-b*x)**(-m)*Gamma(m + S(3), -S(2)*b*x)*exp(S(2)*a)/b**S(3) - S(2)**(-m + S(-5))*x**m*(b*x)**(-m)*Gamma(m + S(3), S(2)*b*x)*exp(-S(2)*a)/b**S(3) - x**(m + S(3))/(S(2)*m + S(6)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(1))*sinh(a + b*x)**S(2), x), x, -S(2)**(-m + S(-4))*x**m*(-b*x)**(-m)*Gamma(m + S(2), -S(2)*b*x)*exp(S(2)*a)/b**S(2) - S(2)**(-m + S(-4))*x**m*(b*x)**(-m)*Gamma(m + S(2), S(2)*b*x)*exp(-S(2)*a)/b**S(2) - x**(m + S(2))/(S(2)*m + S(4)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**m*sinh(a + b*x)**S(2), x), x, S(2)**(-m + S(-3))*x**m*(-b*x)**(-m)*Gamma(m + S(1), -S(2)*b*x)*exp(S(2)*a)/b - S(2)**(-m + S(-3))*x**m*(b*x)**(-m)*Gamma(m + S(1), S(2)*b*x)*exp(-S(2)*a)/b - x**(m + S(1))/(S(2)*m + S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(-1))*sinh(a + b*x)**S(2), x), x, -S(2)**(-m + S(-2))*x**m*(-b*x)**(-m)*Gamma(m, -S(2)*b*x)*exp(S(2)*a) - S(2)**(-m + S(-2))*x**m*(b*x)**(-m)*Gamma(m, S(2)*b*x)*exp(-S(2)*a) - x**m/(S(2)*m), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(-2))*sinh(a + b*x)**S(2), x), x, S(2)**(-m + S(-1))*b*x**m*(-b*x)**(-m)*Gamma(m + S(-1), -S(2)*b*x)*exp(S(2)*a) - S(2)**(-m + S(-1))*b*x**m*(b*x)**(-m)*Gamma(m + S(-1), S(2)*b*x)*exp(-S(2)*a) + x**(m + S(-1))/(-S(2)*m + S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(-3))*sinh(a + b*x)**S(2), x), x, x**(m + S(-2))/(-S(2)*m + S(4)) - S(2)**(-m)*b**S(2)*x**m*(-b*x)**(-m)*Gamma(m + S(-2), -S(2)*b*x)*exp(S(2)*a) - S(2)**(-m)*b**S(2)*x**m*(b*x)**(-m)*Gamma(m + S(-2), S(2)*b*x)*exp(-S(2)*a), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x*sqrt(S(1)/sinh(x))/S(3) + x/(S(1)/sinh(x))**(S(3)/2), x), x, S(2)*x*cosh(x)/(S(3)*sqrt(S(1)/sinh(x))) - S(4)/(S(9)*(S(1)/sinh(x))**(S(3)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(3)*x/(S(5)*sqrt(S(1)/sinh(x))) + x/(S(1)/sinh(x))**(S(5)/2), x), x, S(2)*x*cosh(x)/(S(5)*(S(1)/sinh(x))**(S(3)/2)) - S(4)/(S(25)*(S(1)/sinh(x))**(S(5)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(-S(5)*x*sqrt(S(1)/sinh(x))/S(21) + x/(S(1)/sinh(x))**(S(7)/2), x), x, -S(10)*x*cosh(x)/(S(21)*sqrt(S(1)/sinh(x))) + S(2)*x*cosh(x)/(S(7)*(S(1)/sinh(x))**(S(5)/2)) + S(20)/(S(63)*(S(1)/sinh(x))**(S(3)/2)) - S(4)/(S(49)*(S(1)/sinh(x))**(S(7)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**S(2)*sqrt(S(1)/sinh(x))/S(3) + x**S(2)/(S(1)/sinh(x))**(S(3)/2), x), x, S(2)*x**S(2)*cosh(x)/(S(3)*sqrt(S(1)/sinh(x))) - S(8)*x/(S(9)*(S(1)/sinh(x))**(S(3)/2)) - S(16)*I*sqrt(I*sinh(x))*sqrt(S(1)/sinh(x))*EllipticF(Pi/S(4) - I*x/S(2), S(2))/S(27) + S(16)*cosh(x)/(S(27)*sqrt(S(1)/sinh(x))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)*(I*a*sinh(e + f*x) + a), x), x, -S(6)*I*a*d**S(3)*sinh(e + f*x)/f**S(4) + S(6)*I*a*d**S(2)*(c + d*x)*cosh(e + f*x)/f**S(3) - S(3)*I*a*d*(c + d*x)**S(2)*sinh(e + f*x)/f**S(2) + I*a*(c + d*x)**S(3)*cosh(e + f*x)/f + a*(c + d*x)**S(4)/(S(4)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)*(I*a*sinh(e + f*x) + a), x), x, S(2)*I*a*d**S(2)*cosh(e + f*x)/f**S(3) - S(2)*I*a*d*(c + d*x)*sinh(e + f*x)/f**S(2) + I*a*(c + d*x)**S(2)*cosh(e + f*x)/f + a*(c + d*x)**S(3)/(S(3)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)*(I*a*sinh(e + f*x) + a), x), x, -I*a*d*sinh(e + f*x)/f**S(2) + I*a*(c + d*x)*cosh(e + f*x)/f + a*(c + d*x)**S(2)/(S(2)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)/(c + d*x), x), x, I*a*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d + I*a*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d + a*log(c + d*x)/d, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)/(c + d*x)**S(2), x), x, -I*a*sinh(e + f*x)/(d*(c + d*x)) - a/(d*(c + d*x)) + I*a*f*CoshIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d**S(2) + I*a*f*SinhIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)/(c + d*x)**S(3), x), x, -I*a*sinh(e + f*x)/(S(2)*d*(c + d*x)**S(2)) - a/(S(2)*d*(c + d*x)**S(2)) - I*a*f*cosh(e + f*x)/(S(2)*d**S(2)*(c + d*x)) + I*a*f**S(2)*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/(S(2)*d**S(3)) + I*a*f**S(2)*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/(S(2)*d**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)*(I*a*sinh(e + f*x) + a)**S(2), x), x, S(3)*a**S(2)*c*d**S(2)*x/(S(4)*f**S(2)) + S(3)*a**S(2)*d**S(3)*x**S(2)/(S(8)*f**S(2)) + S(3)*a**S(2)*d**S(3)*sinh(e + f*x)**S(2)/(S(8)*f**S(4)) - S(12)*I*a**S(2)*d**S(3)*sinh(e + f*x)/f**S(4) - S(3)*a**S(2)*d**S(2)*(c + d*x)*sinh(e + f*x)*cosh(e + f*x)/(S(4)*f**S(3)) + S(12)*I*a**S(2)*d**S(2)*(c + d*x)*cosh(e + f*x)/f**S(3) + S(3)*a**S(2)*d*(c + d*x)**S(2)*sinh(e + f*x)**S(2)/(S(4)*f**S(2)) - S(6)*I*a**S(2)*d*(c + d*x)**S(2)*sinh(e + f*x)/f**S(2) - a**S(2)*(c + d*x)**S(3)*sinh(e + f*x)*cosh(e + f*x)/(S(2)*f) + S(2)*I*a**S(2)*(c + d*x)**S(3)*cosh(e + f*x)/f + S(3)*a**S(2)*(c + d*x)**S(4)/(S(8)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)*(I*a*sinh(e + f*x) + a)**S(2), x), x, a**S(2)*d**S(2)*x/(S(4)*f**S(2)) - a**S(2)*d**S(2)*sinh(e + f*x)*cosh(e + f*x)/(S(4)*f**S(3)) + S(4)*I*a**S(2)*d**S(2)*cosh(e + f*x)/f**S(3) + a**S(2)*d*(c + d*x)*sinh(e + f*x)**S(2)/(S(2)*f**S(2)) - S(4)*I*a**S(2)*d*(c + d*x)*sinh(e + f*x)/f**S(2) - a**S(2)*(c + d*x)**S(2)*sinh(e + f*x)*cosh(e + f*x)/(S(2)*f) + S(2)*I*a**S(2)*(c + d*x)**S(2)*cosh(e + f*x)/f + a**S(2)*(c + d*x)**S(3)/(S(2)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)*(I*a*sinh(e + f*x) + a)**S(2), x), x, a**S(2)*c*x/S(2) + a**S(2)*d*x**S(2)/S(4) + a**S(2)*d*sinh(e + f*x)**S(2)/(S(4)*f**S(2)) - S(2)*I*a**S(2)*d*sinh(e + f*x)/f**S(2) - a**S(2)*(c + d*x)*sinh(e + f*x)*cosh(e + f*x)/(S(2)*f) + S(2)*I*a**S(2)*(c + d*x)*cosh(e + f*x)/f + a**S(2)*(c + d*x)**S(2)/(S(2)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)**S(2)/(c + d*x), x), x, S(2)*I*a**S(2)*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d - a**S(2)*CoshIntegral(S(2)*c*f/d + S(2)*f*x)*cosh(-S(2)*c*f/d + S(2)*e)/(S(2)*d) + S(2)*I*a**S(2)*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d - a**S(2)*SinhIntegral(S(2)*c*f/d + S(2)*f*x)*sinh(-S(2)*c*f/d + S(2)*e)/(S(2)*d) + S(3)*a**S(2)*log(c + d*x)/(S(2)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)**S(2)/(c + d*x)**S(2), x), x, -S(4)*a**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(4)/(d*(c + d*x)) + S(2)*I*a**S(2)*f*CoshIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d**S(2) - a**S(2)*f*CoshIntegral(S(2)*c*f/d + S(2)*f*x)*sinh(-S(2)*c*f/d + S(2)*e)/d**S(2) + S(2)*I*a**S(2)*f*SinhIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d**S(2) - a**S(2)*f*SinhIntegral(S(2)*c*f/d + S(2)*f*x)*cosh(-S(2)*c*f/d + S(2)*e)/d**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)**S(2)/(c + d*x)**S(3), x), x, -S(2)*a**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(4)/(d*(c + d*x)**S(2)) - S(4)*a**S(2)*f*sinh(I*Pi/S(4) + e/S(2) + f*x/S(2))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(3)/(d**S(2)*(c + d*x)) + I*a**S(2)*f**S(2)*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d**S(3) - a**S(2)*f**S(2)*CoshIntegral(S(2)*c*f/d + S(2)*f*x)*cosh(-S(2)*c*f/d + S(2)*e)/d**S(3) + I*a**S(2)*f**S(2)*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d**S(3) - a**S(2)*f**S(2)*SinhIntegral(S(2)*c*f/d + S(2)*f*x)*sinh(-S(2)*c*f/d + S(2)*e)/d**S(3), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/(I*a*sinh(e + f*x) + a), x), x, S(12)*d**S(3)*PolyLog(S(3), -exp(I*Pi/S(2) + e + f*x))/(a*f**S(4)) - S(12)*d**S(2)*(c + d*x)*PolyLog(S(2), -exp(I*Pi/S(2) + e + f*x))/(a*f**S(3)) - S(6)*d*(c + d*x)**S(2)*log(exp(I*Pi/S(2) + e + f*x) + S(1))/(a*f**S(2)) + (c + d*x)**S(3)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f) + (c + d*x)**S(3)/(a*f), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/(I*a*sinh(e + f*x) + a), x), x, -S(4)*d**S(2)*PolyLog(S(2), -exp(I*Pi/S(2) + e + f*x))/(a*f**S(3)) - S(4)*d*(c + d*x)*log(exp(I*Pi/S(2) + e + f*x) + S(1))/(a*f**S(2)) + (c + d*x)**S(2)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f) + (c + d*x)**S(2)/(a*f), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/(I*a*sinh(e + f*x) + a), x), x, -S(2)*d*log(cosh(I*Pi/S(4) + e/S(2) + f*x/S(2)))/(a*f**S(2)) + (c + d*x)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)*(I*a*sinh(e + f*x) + a)), x), x, Integrate(S(1)/((c + d*x)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)), x)/(S(2)*a), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)**S(2)*(I*a*sinh(e + f*x) + a)), x), x, Integrate(S(1)/((c + d*x)**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)), x)/(S(2)*a), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/(I*a*sinh(e + f*x) + a)**S(2), x), x, S(4)*d**S(3)*PolyLog(S(3), -exp(I*Pi/S(2) + e + f*x))/(a**S(2)*f**S(4)) + S(4)*d**S(3)*log(cosh(I*Pi/S(4) + e/S(2) + f*x/S(2)))/(a**S(2)*f**S(4)) - S(4)*d**S(2)*(c + d*x)*PolyLog(S(2), -exp(I*Pi/S(2) + e + f*x))/(a**S(2)*f**S(3)) - S(2)*d**S(2)*(c + d*x)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a**S(2)*f**S(3)) - S(2)*d*(c + d*x)**S(2)*log(exp(I*Pi/S(2) + e + f*x) + S(1))/(a**S(2)*f**S(2)) + d*(c + d*x)**S(2)/(S(2)*a**S(2)*f**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)) + (c + d*x)**S(3)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(3)*a**S(2)*f) + (c + d*x)**S(3)/(S(3)*a**S(2)*f) + (c + d*x)**S(3)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(6)*a**S(2)*f*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/(I*a*sinh(e + f*x) + a)**S(2), x), x, -S(4)*d**S(2)*PolyLog(S(2), -exp(I*Pi/S(2) + e + f*x))/(S(3)*a**S(2)*f**S(3)) - S(2)*d**S(2)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(3)*a**S(2)*f**S(3)) - S(4)*d*(c + d*x)*log(exp(I*Pi/S(2) + e + f*x) + S(1))/(S(3)*a**S(2)*f**S(2)) + d*(c + d*x)/(S(3)*a**S(2)*f**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)) + (c + d*x)**S(2)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(3)*a**S(2)*f) + (c + d*x)**S(2)/(S(3)*a**S(2)*f) + (c + d*x)**S(2)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(6)*a**S(2)*f*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/(I*a*sinh(e + f*x) + a)**S(2), x), x, -S(2)*d*log(cosh(I*Pi/S(4) + e/S(2) + f*x/S(2)))/(S(3)*a**S(2)*f**S(2)) + d/(S(6)*a**S(2)*f**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)) + (c + d*x)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(3)*a**S(2)*f) + (c + d*x)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(6)*a**S(2)*f*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)*(I*a*sinh(e + f*x) + a)**S(2)), x), x, Integrate(S(1)/((c + d*x)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(4)), x)/(S(4)*a**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)**S(2)*(I*a*sinh(e + f*x) + a)**S(2)), x), x, Integrate(S(1)/((c + d*x)**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(4)), x)/(S(4)*a**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(I*a*sinh(e + f*x) + a)/x, x), x, sqrt(I*a*sinh(e + f*x) + a)*CoshIntegral(f*x/S(2))*cosh(I*Pi/S(4) + e/S(2))/cosh(I*Pi/S(4) + e/S(2) + f*x/S(2)) + sqrt(I*a*sinh(e + f*x) + a)*SinhIntegral(f*x/S(2))*sinh(I*Pi/S(4) + e/S(2))/cosh(I*Pi/S(4) + e/S(2) + f*x/S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(I*a*sinh(e + f*x) + a)/x**S(2), x), x, f*sqrt(I*a*sinh(e + f*x) + a)*CoshIntegral(f*x/S(2))*sinh(I*Pi/S(4) + e/S(2))/(S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))) + f*sqrt(I*a*sinh(e + f*x) + a)*SinhIntegral(f*x/S(2))*cosh(I*Pi/S(4) + e/S(2))/(S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))) - sqrt(I*a*sinh(e + f*x) + a)/x, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(I*a*sinh(e + f*x) + a)/x**S(3), x), x, f**S(2)*sqrt(I*a*sinh(e + f*x) + a)*CoshIntegral(f*x/S(2))*cosh(I*Pi/S(4) + e/S(2))/(S(8)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))) + f**S(2)*sqrt(I*a*sinh(e + f*x) + a)*SinhIntegral(f*x/S(2))*sinh(I*Pi/S(4) + e/S(2))/(S(8)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))) - f*sqrt(I*a*sinh(e + f*x) + a)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(4)*x) - sqrt(I*a*sinh(e + f*x) + a)/(S(2)*x**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/(x*sqrt(I*a*sinh(e + f*x) + a)), x), x, Integrate(S(1)/(x*sqrt(I*a*sinh(e + f*x) + a)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/(x**S(2)*sqrt(I*a*sinh(e + f*x) + a)), x), x, Integrate(S(1)/(x**S(2)*sqrt(I*a*sinh(e + f*x) + a)), x), expand=True, _diff=True, _numerical=True)
''' long time
# assert rubi_test(rubi_integrate(x**S(3)/(I*a*sinh(e + f*x) + a)**(S(3)/2), x), x, x**S(3)*ArcTan(exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f*sqrt(I*a*sinh(e + f*x) + a)) + x**S(3)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(2)*a*f*sqrt(I*a*sinh(e + f*x) + a)) - S(3)*I*x**S(2)*PolyLog(S(2), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) + S(3)*I*x**S(2)*PolyLog(S(2), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) + S(3)*x**S(2)/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) - S(24)*x*ArcTan(exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(3)*sqrt(I*a*sinh(e + f*x) + a)) + S(12)*I*x*PolyLog(S(3), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(3)*sqrt(I*a*sinh(e + f*x) + a)) - S(12)*I*x*PolyLog(S(3), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(3)*sqrt(I*a*sinh(e + f*x) + a)) + S(24)*I*PolyLog(S(2), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(4)*sqrt(I*a*sinh(e + f*x) + a)) - S(24)*I*PolyLog(S(2), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(4)*sqrt(I*a*sinh(e + f*x) + a)) - S(24)*I*PolyLog(S(4), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(4)*sqrt(I*a*sinh(e + f*x) + a)) + S(24)*I*PolyLog(S(4), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(4)*sqrt(I*a*sinh(e + f*x) + a)), expand=True, _diff=True, _numerical=True)
# assert rubi_test(rubi_integrate(x**S(2)/(I*a*sinh(e + f*x) + a)**(S(3)/2), x), x, x**S(2)*ArcTan(exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f*sqrt(I*a*sinh(e + f*x) + a)) + x**S(2)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(2)*a*f*sqrt(I*a*sinh(e + f*x) + a)) - S(2)*I*x*PolyLog(S(2), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) + S(2)*I*x*PolyLog(S(2), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) + S(2)*x/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) - S(4)*ArcTan(sinh(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(3)*sqrt(I*a*sinh(e + f*x) + a)) + S(4)*I*PolyLog(S(3), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(3)*sqrt(I*a*sinh(e + f*x) + a)) - S(4)*I*PolyLog(S(3), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(3)*sqrt(I*a*sinh(e + f*x) + a)), expand=True, _diff=True, _numerical=True)
# assert rubi_test(rubi_integrate(x/(I*a*sinh(e + f*x) + a)**(S(3)/2), x), x, x*ArcTan(exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f*sqrt(I*a*sinh(e + f*x) + a)) + x*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(2)*a*f*sqrt(I*a*sinh(e + f*x) + a)) - I*PolyLog(S(2), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) + I*PolyLog(S(2), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) + S(1)/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)), expand=True, _diff=True, _numerical=True)
'''
assert rubi_test(rubi_integrate(S(1)/(x*(I*a*sinh(e + f*x) + a)**(S(3)/2)), x), x, Integrate(S(1)/(x*(I*a*sinh(e + f*x) + a)**(S(3)/2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/(x**S(2)*(I*a*sinh(e + f*x) + a)**(S(3)/2)), x), x, Integrate(S(1)/(x**S(2)*(I*a*sinh(e + f*x) + a)**(S(3)/2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/(x*(I*a*sinh(c + d*x) + a)**(S(5)/2)), x), x, Integrate(S(1)/(x*(I*a*sinh(c + d*x) + a)**(S(5)/2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)**(S(1)/3)/x, x), x, Integrate((I*a*sinh(e + f*x) + a)**(S(1)/3)/x, x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*(I*a*sinh(e + f*x) + a)**n, x), x, Integrate((c + d*x)**m*(I*a*sinh(e + f*x) + a)**n, x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*(I*a*sinh(e + f*x) + a)**S(3), x), x, -S(3)*S(2)**(-m + S(-3))*a**S(3)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(2)*f*(c + d*x)/d)*exp(-S(2)*c*f/d + S(2)*e)/f + S(3)*S(2)**(-m + S(-3))*a**S(3)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(2)*f*(c + d*x)/d)*exp(S(2)*c*f/d - S(2)*e)/f - S(3)**(-m + S(-1))*I*a**S(3)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(3)*f*(c + d*x)/d)*exp(-S(3)*c*f/d + S(3)*e)/(S(8)*f) - S(3)**(-m + S(-1))*I*a**S(3)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(3)*f*(c + d*x)/d)*exp(S(3)*c*f/d - S(3)*e)/(S(8)*f) + S(15)*I*a**S(3)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/(S(8)*f) + S(15)*I*a**S(3)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/(S(8)*f) + S(5)*a**S(3)*(c + d*x)**(m + S(1))/(S(2)*d*(m + S(1))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*(I*a*sinh(e + f*x) + a)**S(2), x), x, -S(2)**(-m + S(-3))*a**S(2)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(2)*f*(c + d*x)/d)*exp(-S(2)*c*f/d + S(2)*e)/f + S(2)**(-m + S(-3))*a**S(2)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(2)*f*(c + d*x)/d)*exp(S(2)*c*f/d - S(2)*e)/f + I*a**S(2)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/f + I*a**S(2)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/f + S(3)*a**S(2)*(c + d*x)**(m + S(1))/(S(2)*d*(m + S(1))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*(I*a*sinh(e + f*x) + a), x), x, I*a*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/(S(2)*f) + I*a*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/(S(2)*f) + a*(c + d*x)**(m + S(1))/(d*(m + S(1))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m/(I*a*sinh(e + f*x) + a), x), x, Integrate((c + d*x)**m/cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2), x)/(S(2)*a), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m/(I*a*sinh(e + f*x) + a)**S(2), x), x, Integrate((c + d*x)**m/cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(4), x)/(S(4)*a**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))*(c + d*x)**S(3), x), x, a*(c + d*x)**S(4)/(S(4)*d) - S(6)*b*d**S(3)*sinh(e + f*x)/f**S(4) + S(6)*b*d**S(2)*(c + d*x)*cosh(e + f*x)/f**S(3) - S(3)*b*d*(c + d*x)**S(2)*sinh(e + f*x)/f**S(2) + b*(c + d*x)**S(3)*cosh(e + f*x)/f, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))*(c + d*x)**S(2), x), x, a*(c + d*x)**S(3)/(S(3)*d) + S(2)*b*d**S(2)*cosh(e + f*x)/f**S(3) - S(2)*b*d*(c + d*x)*sinh(e + f*x)/f**S(2) + b*(c + d*x)**S(2)*cosh(e + f*x)/f, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))*(c + d*x), x), x, a*(c + d*x)**S(2)/(S(2)*d) - b*d*sinh(e + f*x)/f**S(2) + b*(c + d*x)*cosh(e + f*x)/f, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))/(c + d*x), x), x, a*log(c + d*x)/d + b*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d + b*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))/(c + d*x)**S(2), x), x, -a/(d*(c + d*x)) - b*sinh(e + f*x)/(d*(c + d*x)) + b*f*CoshIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d**S(2) + b*f*SinhIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))/(c + d*x)**S(3), x), x, -a/(S(2)*d*(c + d*x)**S(2)) - b*sinh(e + f*x)/(S(2)*d*(c + d*x)**S(2)) - b*f*cosh(e + f*x)/(S(2)*d**S(2)*(c + d*x)) + b*f**S(2)*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/(S(2)*d**S(3)) + b*f**S(2)*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/(S(2)*d**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)*(c + d*x)**S(3), x), x, a**S(2)*(c + d*x)**S(4)/(S(4)*d) - S(12)*a*b*d**S(3)*sinh(e + f*x)/f**S(4) + S(12)*a*b*d**S(2)*(c + d*x)*cosh(e + f*x)/f**S(3) - S(6)*a*b*d*(c + d*x)**S(2)*sinh(e + f*x)/f**S(2) + S(2)*a*b*(c + d*x)**S(3)*cosh(e + f*x)/f - S(3)*b**S(2)*c*d**S(2)*x/(S(4)*f**S(2)) - S(3)*b**S(2)*d**S(3)*x**S(2)/(S(8)*f**S(2)) - S(3)*b**S(2)*d**S(3)*sinh(e + f*x)**S(2)/(S(8)*f**S(4)) + S(3)*b**S(2)*d**S(2)*(c + d*x)*sinh(e + f*x)*cosh(e + f*x)/(S(4)*f**S(3)) - S(3)*b**S(2)*d*(c + d*x)**S(2)*sinh(e + f*x)**S(2)/(S(4)*f**S(2)) + b**S(2)*(c + d*x)**S(3)*sinh(e + f*x)*cosh(e + f*x)/(S(2)*f) - b**S(2)*(c + d*x)**S(4)/(S(8)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)*(c + d*x)**S(2), x), x, a**S(2)*(c + d*x)**S(3)/(S(3)*d) + S(4)*a*b*d**S(2)*cosh(e + f*x)/f**S(3) - S(4)*a*b*d*(c + d*x)*sinh(e + f*x)/f**S(2) + S(2)*a*b*(c + d*x)**S(2)*cosh(e + f*x)/f - b**S(2)*d**S(2)*x/(S(4)*f**S(2)) + b**S(2)*d**S(2)*sinh(e + f*x)*cosh(e + f*x)/(S(4)*f**S(3)) - b**S(2)*d*(c + d*x)*sinh(e + f*x)**S(2)/(S(2)*f**S(2)) + b**S(2)*(c + d*x)**S(2)*sinh(e + f*x)*cosh(e + f*x)/(S(2)*f) - b**S(2)*(c + d*x)**S(3)/(S(6)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)*(c + d*x), x), x, a**S(2)*(c + d*x)**S(2)/(S(2)*d) - S(2)*a*b*d*sinh(e + f*x)/f**S(2) + S(2)*a*b*(c + d*x)*cosh(e + f*x)/f - b**S(2)*c*x/S(2) - b**S(2)*d*x**S(2)/S(4) - b**S(2)*d*sinh(e + f*x)**S(2)/(S(4)*f**S(2)) + b**S(2)*(c + d*x)*sinh(e + f*x)*cosh(e + f*x)/(S(2)*f), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)/(c + d*x), x), x, a**S(2)*log(c + d*x)/d + S(2)*a*b*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d + S(2)*a*b*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d + b**S(2)*CoshIntegral(S(2)*c*f/d + S(2)*f*x)*cosh(-S(2)*c*f/d + S(2)*e)/(S(2)*d) + b**S(2)*SinhIntegral(S(2)*c*f/d + S(2)*f*x)*sinh(-S(2)*c*f/d + S(2)*e)/(S(2)*d) - b**S(2)*log(c + d*x)/(S(2)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)/(c + d*x)**S(2), x), x, -a**S(2)/(d*(c + d*x)) - S(2)*a*b*sinh(e + f*x)/(d*(c + d*x)) + S(2)*a*b*f*CoshIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d**S(2) + S(2)*a*b*f*SinhIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d**S(2) - b**S(2)*sinh(e + f*x)**S(2)/(d*(c + d*x)) + b**S(2)*f*CoshIntegral(S(2)*c*f/d + S(2)*f*x)*sinh(-S(2)*c*f/d + S(2)*e)/d**S(2) + b**S(2)*f*SinhIntegral(S(2)*c*f/d + S(2)*f*x)*cosh(-S(2)*c*f/d + S(2)*e)/d**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)/(c + d*x)**S(3), x), x, -a**S(2)/(S(2)*d*(c + d*x)**S(2)) - a*b*sinh(e + f*x)/(d*(c + d*x)**S(2)) - a*b*f*cosh(e + f*x)/(d**S(2)*(c + d*x)) + a*b*f**S(2)*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d**S(3) + a*b*f**S(2)*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d**S(3) - b**S(2)*sinh(e + f*x)**S(2)/(S(2)*d*(c + d*x)**S(2)) - b**S(2)*f*sinh(e + f*x)*cosh(e + f*x)/(d**S(2)*(c + d*x)) + b**S(2)*f**S(2)*CoshIntegral(S(2)*c*f/d + S(2)*f*x)*cosh(-S(2)*c*f/d + S(2)*e)/d**S(3) + b**S(2)*f**S(2)*SinhIntegral(S(2)*c*f/d + S(2)*f*x)*sinh(-S(2)*c*f/d + S(2)*e)/d**S(3), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/(a + b*sinh(e + f*x)), x), x, S(6)*d**S(3)*PolyLog(S(4), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(4)*sqrt(a**S(2) + b**S(2))) - S(6)*d**S(3)*PolyLog(S(4), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(4)*sqrt(a**S(2) + b**S(2))) - S(6)*d**S(2)*(c + d*x)*PolyLog(S(3), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(3)*sqrt(a**S(2) + b**S(2))) + S(6)*d**S(2)*(c + d*x)*PolyLog(S(3), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(3)*sqrt(a**S(2) + b**S(2))) + S(3)*d*(c + d*x)**S(2)*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(2)*sqrt(a**S(2) + b**S(2))) - S(3)*d*(c + d*x)**S(2)*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(2)*sqrt(a**S(2) + b**S(2))) + (c + d*x)**S(3)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f*sqrt(a**S(2) + b**S(2))) - (c + d*x)**S(3)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f*sqrt(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/(a + b*sinh(e + f*x)), x), x, -S(2)*d**S(2)*PolyLog(S(3), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(3)*sqrt(a**S(2) + b**S(2))) + S(2)*d**S(2)*PolyLog(S(3), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(3)*sqrt(a**S(2) + b**S(2))) + S(2)*d*(c + d*x)*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(2)*sqrt(a**S(2) + b**S(2))) - S(2)*d*(c + d*x)*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(2)*sqrt(a**S(2) + b**S(2))) + (c + d*x)**S(2)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f*sqrt(a**S(2) + b**S(2))) - (c + d*x)**S(2)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f*sqrt(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/(a + b*sinh(e + f*x)), x), x, d*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(2)*sqrt(a**S(2) + b**S(2))) - d*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(2)*sqrt(a**S(2) + b**S(2))) + (c + d*x)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f*sqrt(a**S(2) + b**S(2))) - (c + d*x)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f*sqrt(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((a + b*sinh(e + f*x))*(c + d*x)), x), x, Integrate(S(1)/((a + b*sinh(e + f*x))*(c + d*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((a + b*sinh(e + f*x))*(c + d*x)**S(2)), x), x, Integrate(S(1)/((a + b*sinh(e + f*x))*(c + d*x)**S(2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/(a + b*sinh(e + f*x))**S(2), x), x, S(6)*a*d**S(3)*PolyLog(S(4), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(4)*(a**S(2) + b**S(2))**(S(3)/2)) - S(6)*a*d**S(3)*PolyLog(S(4), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(4)*(a**S(2) + b**S(2))**(S(3)/2)) - S(6)*a*d**S(2)*(c + d*x)*PolyLog(S(3), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))**(S(3)/2)) + S(6)*a*d**S(2)*(c + d*x)*PolyLog(S(3), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))**(S(3)/2)) + S(3)*a*d*(c + d*x)**S(2)*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) - S(3)*a*d*(c + d*x)**S(2)*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) + a*(c + d*x)**S(3)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f*(a**S(2) + b**S(2))**(S(3)/2)) - a*(c + d*x)**S(3)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f*(a**S(2) + b**S(2))**(S(3)/2)) - b*(c + d*x)**S(3)*cosh(e + f*x)/(f*(a + b*sinh(e + f*x))*(a**S(2) + b**S(2))) - S(6)*d**S(3)*PolyLog(S(3), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(4)*(a**S(2) + b**S(2))) - S(6)*d**S(3)*PolyLog(S(3), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(4)*(a**S(2) + b**S(2))) + S(6)*d**S(2)*(c + d*x)*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))) + S(6)*d**S(2)*(c + d*x)*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))) + S(3)*d*(c + d*x)**S(2)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f**S(2)*(a**S(2) + b**S(2))) + S(3)*d*(c + d*x)**S(2)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f**S(2)*(a**S(2) + b**S(2))) - (c + d*x)**S(3)/(f*(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/(a + b*sinh(e + f*x))**S(2), x), x, -S(2)*a*d**S(2)*PolyLog(S(3), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))**(S(3)/2)) + S(2)*a*d**S(2)*PolyLog(S(3), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))**(S(3)/2)) + S(2)*a*d*(c + d*x)*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) - S(2)*a*d*(c + d*x)*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) + a*(c + d*x)**S(2)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f*(a**S(2) + b**S(2))**(S(3)/2)) - a*(c + d*x)**S(2)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f*(a**S(2) + b**S(2))**(S(3)/2)) - b*(c + d*x)**S(2)*cosh(e + f*x)/(f*(a + b*sinh(e + f*x))*(a**S(2) + b**S(2))) + S(2)*d**S(2)*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))) + S(2)*d**S(2)*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))) + S(2)*d*(c + d*x)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f**S(2)*(a**S(2) + b**S(2))) + S(2)*d*(c + d*x)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f**S(2)*(a**S(2) + b**S(2))) - (c + d*x)**S(2)/(f*(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/(a + b*sinh(e + f*x))**S(2), x), x, a*d*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) - a*d*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) + a*(c + d*x)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f*(a**S(2) + b**S(2))**(S(3)/2)) - a*(c + d*x)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f*(a**S(2) + b**S(2))**(S(3)/2)) - b*(c + d*x)*cosh(e + f*x)/(f*(a + b*sinh(e + f*x))*(a**S(2) + b**S(2))) + d*log(a + b*sinh(e + f*x))/(f**S(2)*(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((a + b*sinh(e + f*x))**S(2)*(c + d*x)), x), x, Integrate(S(1)/((a + b*sinh(e + f*x))**S(2)*(c + d*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((a + b*sinh(e + f*x))**S(2)*(c + d*x)**S(2)), x), x, Integrate(S(1)/((a + b*sinh(e + f*x))**S(2)*(c + d*x)**S(2)), x), expand=True, _diff=True, _numerical=True)
# long time assert rubi_test(rubi_integrate((e + f*x)**S(2)/(a + b*sinh(c + d*x))**S(3), x), x, S(3)*a**S(2)*(e + f*x)**S(2)*log(b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a**S(2)*(e + f*x)**S(2)*log(b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(5)/2)) + S(3)*a**S(2)*f*(e + f*x)*PolyLog(S(2), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(d**S(2)*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a**S(2)*f*(e + f*x)*PolyLog(S(2), -b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))))/(d**S(2)*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a**S(2)*f**S(2)*PolyLog(S(3), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(d**S(3)*(a**S(2) + b**S(2))**(S(5)/2)) + S(3)*a**S(2)*f**S(2)*PolyLog(S(3), -b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))))/(d**S(3)*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a*b*(e + f*x)**S(2)*cosh(c + d*x)/(S(2)*d*(a + b*sinh(c + d*x))*(a**S(2) + b**S(2))**S(2)) - S(3)*a*(e + f*x)**S(2)/(S(2)*d*(a**S(2) + b**S(2))**S(2)) + S(3)*a*f*(e + f*x)*log(b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(d**S(2)*(a**S(2) + b**S(2))**S(2)) + S(3)*a*f*(e + f*x)*log(b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(d**S(2)*(a**S(2) + b**S(2))**S(2)) + S(3)*a*f**S(2)*PolyLog(S(2), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(d**S(3)*(a**S(2) + b**S(2))**S(2)) + S(3)*a*f**S(2)*PolyLog(S(2), -b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))))/(d**S(3)*(a**S(2) + b**S(2))**S(2)) - b*(e + f*x)**S(2)*cosh(c + d*x)/(S(2)*d*(a + b*sinh(c + d*x))**S(2)*(a**S(2) + b**S(2))) - (e + f*x)**S(2)*log(b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(3)/2)) + (e + f*x)**S(2)*log(b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(3)/2)) - f*(e + f*x)*PolyLog(S(2), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(d**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) + f*(e + f*x)*PolyLog(S(2), -b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))))/(d**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) - f*(e + f*x)/(d**S(2)*(a + b*sinh(c + d*x))*(a**S(2) + b**S(2))) + f**S(2)*PolyLog(S(3), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(d**S(3)*(a**S(2) + b**S(2))**(S(3)/2)) - f**S(2)*PolyLog(S(3), -b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))))/(d**S(3)*(a**S(2) + b**S(2))**(S(3)/2)) - S(2)*f**S(2)*atanh((-a*tanh(c/S(2) + d*x/S(2)) + b)/sqrt(a**S(2) + b**S(2)))/(d**S(3)*(a**S(2) + b**S(2))**(S(3)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((e + f*x)/(a + b*sinh(c + d*x))**S(3), x), x, S(3)*a**S(2)*(e + f*x)*log(b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a**S(2)*(e + f*x)*log(b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(5)/2)) + S(3)*a**S(2)*f*PolyLog(S(2), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(S(2)*d**S(2)*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a**S(2)*f*PolyLog(S(2), -b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))))/(S(2)*d**S(2)*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a*b*(e + f*x)*cosh(c + d*x)/(S(2)*d*(a + b*sinh(c + d*x))*(a**S(2) + b**S(2))**S(2)) + S(3)*a*f*log(a + b*sinh(c + d*x))/(S(2)*d**S(2)*(a**S(2) + b**S(2))**S(2)) - b*(e + f*x)*cosh(c + d*x)/(S(2)*d*(a + b*sinh(c + d*x))**S(2)*(a**S(2) + b**S(2))) - (e + f*x)*log(b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(3)/2)) + (e + f*x)*log(b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(3)/2)) - f*PolyLog(S(2), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(S(2)*d**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) + f*PolyLog(S(2), -b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))))/(S(2)*d**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) - f/(S(2)*d**S(2)*(a + b*sinh(c + d*x))*(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((a + b*sinh(c + d*x))**S(3)*(e + f*x)), x), x, Integrate(S(1)/((a + b*sinh(c + d*x))**S(3)*(e + f*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((a + b*sinh(c + d*x))**S(3)*(e + f*x)**S(2)), x), x, Integrate(S(1)/((a + b*sinh(c + d*x))**S(3)*(e + f*x)**S(2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**n*(c + d*x)**m, x), x, Integrate((a + b*sinh(e + f*x))**n*(c + d*x)**m, x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(3)*(c + d*x)**m, x), x, S(3)*S(2)**(-m + S(-3))*a*b**S(2)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(2)*f*(c + d*x)/d)*exp(-S(2)*c*f/d + S(2)*e)/f - S(3)*S(2)**(-m + S(-3))*a*b**S(2)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(2)*f*(c + d*x)/d)*exp(S(2)*c*f/d - S(2)*e)/f + S(3)**(-m + S(-1))*b**S(3)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(3)*f*(c + d*x)/d)*exp(-S(3)*c*f/d + S(3)*e)/(S(8)*f) + S(3)**(-m + S(-1))*b**S(3)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(3)*f*(c + d*x)/d)*exp(S(3)*c*f/d - S(3)*e)/(S(8)*f) + a**S(3)*(c + d*x)**(m + S(1))/(d*(m + S(1))) + S(3)*a**S(2)*b*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/(S(2)*f) + S(3)*a**S(2)*b*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/(S(2)*f) - S(3)*a*b**S(2)*(c + d*x)**(m + S(1))/(S(2)*d*(m + S(1))) - S(3)*b**S(3)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/(S(8)*f) - S(3)*b**S(3)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/(S(8)*f), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)*(c + d*x)**m, x), x, S(2)**(-m + S(-3))*b**S(2)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(2)*f*(c + d*x)/d)*exp(-S(2)*c*f/d + S(2)*e)/f - S(2)**(-m + S(-3))*b**S(2)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(2)*f*(c + d*x)/d)*exp(S(2)*c*f/d - S(2)*e)/f + a**S(2)*(c + d*x)**(m + S(1))/(d*(m + S(1))) + a*b*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/f + a*b*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/f - b**S(2)*(c + d*x)**(m + S(1))/(S(2)*d*(m + S(1))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))*(c + d*x)**m, x), x, a*(c + d*x)**(m + S(1))/(d*(m + S(1))) + b*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/(S(2)*f) + b*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/(S(2)*f), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m/(a + b*sinh(e + f*x)), x), x, Integrate((c + d*x)**m/(a + b*sinh(e + f*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m/(a + b*sinh(e + f*x))**S(2), x), x, Integrate((c + d*x)**m/(a + b*sinh(e + f*x))**S(2), x), expand=True, _diff=True, _numerical=True)
| 251.514107
| 2,469
| 0.511161
| 19,581
| 80,233
| 2.059599
| 0.030233
| 0.083959
| 0.04746
| 0.025391
| 0.825709
| 0.819435
| 0.813831
| 0.808351
| 0.797838
| 0.785068
| 0
| 0.049775
| 0.117844
| 80,233
| 318
| 2,470
| 252.305031
| 0.52002
| 0.050765
| 0
| 0.006711
| 0
| 0.003356
| 0.001145
| 0
| 0
| 0
| 0
| 0
| 0.540268
| 1
| 0.003356
| false
| 0
| 0.073826
| 0
| 0.077181
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 5010b73121964b1e2039f7c97a78c65b09242001
| 21,089
| py
| Python
| CalibTracker/SiStripChannelGain/test/Cosmic_B38/InputFiles_cff.py
| ckamtsikis/cmssw
| ea19fe642bb7537cbf58451dcf73aa5fd1b66250
| ["Apache-2.0"]
| 852
| 2015-01-11T21:03:51.000Z
| 2022-03-25T21:14:00.000Z
| CalibTracker/SiStripChannelGain/test/Cosmic_B38/InputFiles_cff.py
| ckamtsikis/cmssw
| ea19fe642bb7537cbf58451dcf73aa5fd1b66250
| ["Apache-2.0"]
| 30,371
| 2015-01-02T00:14:40.000Z
| 2022-03-31T23:26:05.000Z
| CalibTracker/SiStripChannelGain/test/Cosmic_B38/InputFiles_cff.py
| ckamtsikis/cmssw
| ea19fe642bb7537cbf58451dcf73aa5fd1b66250
| ["Apache-2.0"]
| 3,240
| 2015-01-02T05:53:18.000Z
| 2022-03-31T17:24:21.000Z
|
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/80C4285C-779E-DD11-9889-001617E30CA4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/A83BF5EE-6E9E-DD11-8082-000423D94700.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/8266853E-999E-DD11-8B73-001D09F2432B.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/2AAFE9A9-A19E-DD11-821B-000423D99F3E.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/067E98F3-489F-DD11-B309-000423D996B4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/64C1D6F5-489F-DD11-90B7-000423D986A8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0012/3C084F93-679C-DD11-A361-000423D9989E.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/9C14E69F-069D-DD11-AC41-001617DBCF1E.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/5439B4CA-309D-DD11-84E5-000423D944F8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/D20BB375-AE9D-DD11-BF49-000423D944FC.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/E2E8FE03-A69D-DD11-8699-000423D98750.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/B40C29EC-B69D-DD11-A665-000423D6A6F4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/843C7874-1F9F-DD11-8E03-000423D98804.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/7EB3DF8E-0E9F-DD11-A451-001D09F29146.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/CEB001F9-169F-DD11-A5E6-000423D94494.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/382BAEE2-D39E-DD11-A0A4-000423D98EC8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/CC5B37A1-A99E-DD11-816F-001617DBD230.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/6EDD168B-2F9F-DD11-ADF5-001617C3B79A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/EE4B4C82-999C-DD11-86EC-000423D99F3E.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/1CC8332F-459E-DD11-BFE1-001617C3B65A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/7A6A133C-999E-DD11-9155-001D09F2462D.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/F292BE7F-409F-DD11-883A-001617C3B6FE.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/B870AA81-409F-DD11-B549-001617C3B78C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/9003F328-899C-DD11-83D7-000423D986C4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/500B13D3-6F9C-DD11-8745-001617DC1F70.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/4CBAEDCC-309D-DD11-A617-001617E30D06.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/AED19458-399D-DD11-B9AC-000423D9A2AE.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/A6688D1F-959D-DD11-B5B7-000423D6A6F4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/F0076F20-F59E-DD11-8B57-000423D944F0.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/EC6C6EA4-499D-DD11-AC7D-000423D98DB4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/DA099105-639D-DD11-9C3E-001617E30F50.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/2E40EDED-1A9E-DD11-9014-001617DBD5AC.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/7647004C-F19D-DD11-8BAA-001617DBD224.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/38881706-5E9E-DD11-B487-000423D98868.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/1098901B-569E-DD11-BE60-000423D985E4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/5E4E7508-919C-DD11-AEB1-000423D9853C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/060DD475-179D-DD11-A003-000423D94908.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/8A563E55-289D-DD11-BA24-000423D6BA18.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/545F9B54-D09D-DD11-A58B-000423D6B5C4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/68795DEE-D79D-DD11-ADB7-000423D98DB4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/3AD49E1B-F59E-DD11-81C4-000423D94700.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/548891AB-8C9D-DD11-8989-001617C3B69C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/745CD91D-529D-DD11-8908-000423D6B48C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/3EF1CC87-2F9F-DD11-9EFC-001617DF785A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/FCB4F2BA-3C9E-DD11-82C7-000423D99160.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/ECC4D018-569E-DD11-80C4-001617C3B6FE.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/20C97175-669E-DD11-8ADD-00161757BF42.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/52683098-A99E-DD11-BCD0-000423D94AA8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/F6C17BA7-A19E-DD11-B57C-000423D98634.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/D844B765-519F-DD11-96F9-001617E30D0A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/02EB3FD3-6F9C-DD11-8C35-001617C3B6FE.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/0EB355C8-309D-DD11-85B7-001617C3B64C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/8E478481-BA9E-DD11-9573-000423D6B358.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/A4775BE3-739D-DD11-843D-001617C3B778.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/8E8B21C6-F99D-DD11-BF05-000423D986A8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/0EF20D52-139E-DD11-9473-000423D6B5C4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/7C00B404-389F-DD11-AB81-000423D985E4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/AE67CFF1-279F-DD11-B6DC-000423D98804.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/7400A101-389F-DD11-B540-000423D60FF6.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/2A630CF2-279F-DD11-942A-000423D985E4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/1CD3DEA6-F59C-DD11-986D-000423D98BC4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/D809EECD-7F9E-DD11-B4D7-00161757BF42.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/64451F65-779E-DD11-869D-001617E30D40.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/BA532E6C-519F-DD11-8DE7-000423D98FBC.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/021AEBFE-7A9F-DD11-863E-0019DB29C620.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/CA5A90F6-489F-DD11-8F60-000423D6B2D8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/028578E0-809C-DD11-AF7D-001617C3B6E8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0012/08A6038E-679C-DD11-A4B9-001617E30D0A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0012/18BA3290-679C-DD11-B9A1-001617C3B77C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/B0CD45DB-D39E-DD11-BC03-000423D985E4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/125FE86B-CB9E-DD11-B054-000423DD2F34.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/6E783236-849D-DD11-A9FF-001617C3B654.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/800FA4BD-5A9D-DD11-ACBB-001617DBD5AC.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/760FB963-7C9D-DD11-B812-001D09F231C9.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/52CBD0DE-0A9E-DD11-B583-000423D6B358.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/905B1953-349E-DD11-8022-001D09F2AD7F.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/7A7A6D05-389F-DD11-9D08-000423D98804.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/C223029D-E59C-DD11-A125-001617E30D40.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/3293D2A6-4D9E-DD11-81D1-000423D98B5C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/4E5AEDFC-5D9E-DD11-BD7D-001617C3B5F4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/2A9CA4B8-909E-DD11-857B-001617E30D38.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/9E30F47D-409F-DD11-A947-001617C3B6E8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/745BB069-519F-DD11-A8F9-000423D94700.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/4493CD28-899C-DD11-AF14-000423D6CA02.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/0085D14F-289D-DD11-862E-000423D6006E.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/841A2D63-E09D-DD11-BDA5-001617DF785A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/5658C98B-9D9D-DD11-9B46-000423D99F1E.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/3ABEFDFB-169F-DD11-94E3-000423D98BC4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/FC20EEFC-059F-DD11-A7CA-001617C3B5F4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/CE7B883A-ED9E-DD11-A737-0019DB29C614.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/4261BA6C-CB9E-DD11-AE94-000423D986A8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/9A134C3C-849D-DD11-8A1C-000423D98C20.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/FE7A7A73-1F9F-DD11-A841-001617DBD230.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/6097BB22-FE9C-DD11-AA3C-000423D944F0.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/F4AE9DE3-DC9C-DD11-9223-000423D6B42C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/3CA664CA-309D-DD11-A642-000423D951D4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/2C9D3EE0-C79D-DD11-AAF0-000423D94534.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/7E81C76A-BF9D-DD11-9970-001617E30F50.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/4EA98C8F-0E9F-DD11-A48E-001D09F253FC.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/0A4BBAF5-C29E-DD11-967D-0016177CA778.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/4253601B-B29E-DD11-9725-001617DBD224.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/E00BC4D5-D39E-DD11-861A-001617C3B5E4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/EA733333-419D-DD11-9B49-000423D99660.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/40A87664-239E-DD11-8ABC-000423D944F8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/DA448F60-239E-DD11-8347-000423D98DD4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/0E9D6303-389F-DD11-8C22-001617E30D0A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/8A40053E-889E-DD11-9442-000423D944F0.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/082ED767-999E-DD11-962C-0019B9F70607.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/205DAE07-0F9D-DD11-9FD4-000423D9890C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/041B05FD-059F-DD11-871E-001617E30D52.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/FE7823F2-C29E-DD11-81F1-0019DB29C614.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/BC834BD5-2B9E-DD11-A8D9-001617C3B706.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/966DFADC-E89D-DD11-A90E-000423D99264.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/B4FD3F7C-409F-DD11-8F2B-001617DBCF90.root'
| 183.382609
| 184
| 0.843283
| 2,736
| 21,089
| 6.375
| 0.149488
| 0.078431
| 0.117647
| 0.143791
| 0.829148
| 0.829148
| 0.829148
| 0.829148
| 0.829148
| 0.829148
| 0
| 0.165101
| 0.005406
| 21,089
| 114
| 185
| 184.991228
| 0.66646
| 0
| 0
| 0
| 0
| 1
| 0.978425
| 0.978425
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 11
| ac95238747ed70d5ab97026ac30760503ed60208
| 24,199
| py
| Python
| src/testing/TestON/bin/nec.py
| securedataplane/preacher
| 2f76581de47036e79cd6e1183948c88b35ce4950
| ["MIT"]
| 1
| 2020-07-23T08:06:44.000Z
| 2020-07-23T08:06:44.000Z
| src/testing/TestON/bin/nec.py
| securedataplane/preacher
| 2f76581de47036e79cd6e1183948c88b35ce4950
| ["MIT"]
| null | null | null
| src/testing/TestON/bin/nec.py
| securedataplane/preacher
| 2f76581de47036e79cd6e1183948c88b35ce4950
| ["MIT"]
| null | null | null
|
class NEC:
def __init__( self ):
self.prompt = '(.*)'
self.timeout = 60
def show(self, *options, **def_args ):
'''Possible Options :[' access-filter ', ' accounting ', ' acknowledgments ', ' auto-config ', ' axrp ', ' cfm ', ' channel-group ', ' clock ', ' config-lock-status ', ' cpu ', ' dhcp ', ' dot1x ', ' dumpfile ', ' efmoam ', ' environment ', ' file ', ' flash ', ' gsrp ', ' history ', ' igmp-snooping ', ' interfaces ', ' ip ', ' ip-dual ', ' ipv6-dhcp ', ' license ', ' lldp ', ' logging ', ' loop-detection ', ' mac-address-table ', ' mc ', ' memory ', ' mld-snooping ', ' netconf ', ' netstat ', ' ntp ', ' oadp ', ' openflow ', ' port ', ' power ', ' processes ', ' qos ', ' qos-flow ', ' sessions ', ' sflow ', ' spanning-tree ', ' ssh ', ' system ', ' tcpdump ', ' tech-support ', ' track ', ' version ', ' vlan ', ' vrrpstatus ', ' whoami ']'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_ip(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show ip "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_mc(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show mc "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_cfm(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show cfm "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_ntp(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show ntp "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_ssh(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show ssh "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_qos(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show qos "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_cpu(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show cpu "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_vlan(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show vlan "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_lldp(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show lldp "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_dhcp(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show dhcp "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_axrp(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show axrp "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_oadp(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show oadp "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_gsrp(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show gsrp "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_port(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show port "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_file(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show file "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_power(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show power "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_clock(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show clock "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_dot1x(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show dot1x "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_sflow(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show sflow "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_track(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show track "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_flash(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show flash "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_system(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show system "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_whoami(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show whoami "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_efmoam(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show efmoam "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_memory(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show memory "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_tcpdump(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show tcpdump "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_history(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show history "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_logging(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show logging "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_license(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show license "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_netstat(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show netstat "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_version(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show version "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_netconf(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show netconf "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_ipdual(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show ip-dual "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_sessions(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show sessions "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_qosflow(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show qos-flow "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_openflow(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show openflow "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_dumpfile(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show dumpfile "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_ipv6dhcp(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show ipv6-dhcp "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_processes(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show processes "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_vrrpstatus(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show vrrpstatus "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_interfaces(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show interfaces "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_environment(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show environment "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_autoconfig(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show auto-config "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_techsupport(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show tech-support "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_mldsnooping(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show mld-snooping "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_igmpsnooping(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show igmp-snooping "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_channelgroup(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show channel-group "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_spanningtree(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show spanning-tree "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_loopdetection(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show loop-detection "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_acknowledgments(self, *options, **def_args ):
'''Possible Options :[' interface ']'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show acknowledgments "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_macaddresstable(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show mac-address-table "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_configlockstatus(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show config-lock-status "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
def show_acknowledgments_interface(self, *options, **def_args ):
'''Possible Options :[]'''
arguments= ''
for option in options:
arguments = arguments + option +' '
prompt = def_args.setdefault('prompt',self.prompt)
timeout = def_args.setdefault('timeout',self.timeout)
self.execute( cmd= "show acknowledgments interface "+ arguments, prompt = prompt, timeout = timeout )
return main.TRUE
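The wrapper methods above all follow the same template: join the positional options into a space-separated argument string, resolve prompt and timeout defaults from the keyword arguments, send "show <subcommand>" through self.execute, and return main.TRUE (a TestON-style truth constant assumed to be in scope). A minimal sketch of how that boilerplate could be generated from a single factory is shown below; _make_show_method and the class-attachment loop are hypothetical and not part of the original driver.
# Hypothetical factory reproducing the repeated "show_*" wrapper pattern.
# `main.TRUE` and `self.execute` are assumed to be provided by the driver's
# framework, exactly as in the hand-written methods above.
def _make_show_method(subcommand):
    def show_method(self, *options, **def_args):
        '''Possible Options :[]'''
        # same space-joined options (with trailing space) as the originals
        arguments = ' '.join(options) + ' ' if options else ''
        prompt = def_args.setdefault('prompt', self.prompt)
        timeout = def_args.setdefault('timeout', self.timeout)
        self.execute(cmd="show " + subcommand + " " + arguments,
                     prompt=prompt, timeout=timeout)
        return main.TRUE
    return show_method

# Illustrative attachment to a driver class (names are placeholders only):
# for name in ("clock", "version", "interfaces"):
#     setattr(SwitchDriver, "show_" + name, _make_show_method(name))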
| 44.320513
| 867
| 0.592793
| 2,470
| 24,199
| 5.718623
| 0.040081
| 0.080283
| 0.129982
| 0.068814
| 0.919858
| 0.919858
| 0.915186
| 0.915186
| 0.911717
| 0.911717
| 0
| 0.000458
| 0.278193
| 24,199
| 545
| 868
| 44.401835
| 0.80821
| 0.081863
| 0
| 0.743119
| 0
| 0
| 0.077645
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.126147
| false
| 0
| 0
| 0
| 0.252294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
acff916c9c13ec45d8705a7c78687da27d11f532
| 92
|
py
|
Python
|
parameters_8560.py
|
ksuhr1/CMPS183-hw3
|
d0450827912b7ec355a9e433c0c7e33d1b2610a0
|
[
"BSD-3-Clause"
] | null | null | null |
parameters_8560.py
|
ksuhr1/CMPS183-hw3
|
d0450827912b7ec355a9e433c0c7e33d1b2610a0
|
[
"BSD-3-Clause"
] | null | null | null |
parameters_8560.py
|
ksuhr1/CMPS183-hw3
|
d0450827912b7ec355a9e433c0c7e33d1b2610a0
|
[
"BSD-3-Clause"
] | null | null | null |
password="pbkdf2(1000,20,sha512)$b24904a15adb4514$85f395bc9c1f6be8227d9f7540e54127cd4f0fdf"
| 46
| 91
| 0.891304
| 7
| 92
| 11.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.494505
| 0.01087
| 92
| 1
| 92
| 92
| 0.406593
| 0
| 0
| 0
| 0
| 0
| 0.869565
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
4a0c17dcadf4678d289fa2db3d23b33230f5a519
| 2,303
|
py
|
Python
|
example/django_example/polls/tests.py
|
dmsimard/dynaconf
|
ec394ab07e3b522879c8be678c65ebeb05fc2b59
|
[
"MIT"
] | null | null | null |
example/django_example/polls/tests.py
|
dmsimard/dynaconf
|
ec394ab07e3b522879c8be678c65ebeb05fc2b59
|
[
"MIT"
] | null | null | null |
example/django_example/polls/tests.py
|
dmsimard/dynaconf
|
ec394ab07e3b522879c8be678c65ebeb05fc2b59
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from django.test import TestCase
# Create your tests here.
class SettingsTest(TestCase):
def test_settings(self):
self.assertEqual(settings.SERVER, 'prodserver.com')
self.assertEqual(
settings.STATIC_URL, '/changed/in/settings.toml/by/dynaconf/')
self.assertEqual(settings.USERNAME, 'admin_user_from_env')
self.assertEqual(settings.PASSWORD, 'My5up3r53c4et')
self.assertEqual(settings.get('PASSWORD'), 'My5up3r53c4et')
self.assertEqual(settings.FOO, 'It overrides every other env')
with settings.using_env('development'):
self.assertEqual(settings.SERVER, 'devserver.com')
self.assertEqual(settings.PASSWORD, False)
self.assertEqual(settings.USERNAME, 'admin_user_from_env')
self.assertEqual(settings.FOO, 'It overrides every other env')
self.assertEqual(settings.SERVER, 'prodserver.com')
self.assertEqual(settings.PASSWORD, 'My5up3r53c4et')
self.assertEqual(settings.USERNAME, 'admin_user_from_env')
self.assertEqual(settings.FOO, 'It overrides every other env')
with settings.using_env('staging'):
self.assertEqual(settings.SERVER, 'stagingserver.com')
self.assertEqual(settings.PASSWORD, False)
self.assertEqual(settings.USERNAME, 'admin_user_from_env')
self.assertEqual(settings.FOO, 'It overrides every other env')
self.assertEqual(settings.SERVER, 'prodserver.com')
self.assertEqual(settings.PASSWORD, 'My5up3r53c4et')
self.assertEqual(settings.USERNAME, 'admin_user_from_env')
self.assertEqual(settings.FOO, 'It overrides every other env')
with settings.using_env('customenv'):
self.assertEqual(settings.SERVER, 'customserver.com')
self.assertEqual(settings.PASSWORD, False)
self.assertEqual(settings.USERNAME, 'admin_user_from_env')
self.assertEqual(settings.FOO, 'It overrides every other env')
self.assertEqual(settings.SERVER, 'prodserver.com')
self.assertEqual(settings.PASSWORD, 'My5up3r53c4et')
self.assertEqual(settings.USERNAME, 'admin_user_from_env')
self.assertEqual(settings.FOO, 'It overrides every other env')
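The test above exercises dynaconf's environment layering: module-level reads reflect the active (production-like) environment, settings.using_env(...) temporarily switches environments inside the with block, and the previous values are restored afterwards; USERNAME and FOO keep the same values in every environment, which suggests they come from environment variables or a global override. A hypothetical settings.toml consistent with those assertions, embedded as a Python string purely for illustration, could look like the sketch below (the repository's real configuration files are not shown in this record and may differ).
# Hypothetical settings.toml content, embedded as a string for illustration.
# The env names and values mirror the assertions in the test above; section
# layout follows common dynaconf conventions and is an assumption.
EXAMPLE_SETTINGS_TOML = """
[production]
SERVER = "prodserver.com"
PASSWORD = "My5up3r53c4et"
STATIC_URL = "/changed/in/settings.toml/by/dynaconf/"

[development]
SERVER = "devserver.com"
PASSWORD = false

[staging]
SERVER = "stagingserver.com"
PASSWORD = false

[customenv]
SERVER = "customserver.com"
PASSWORD = false
"""
# USERNAME ("admin_user_from_env") and FOO are asserted to be identical in
# every environment, so they are assumed to come from environment variables
# or a global override rather than from this file.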
| 46.06
| 74
| 0.696049
| 249
| 2,303
| 6.333333
| 0.188755
| 0.285352
| 0.43754
| 0.16487
| 0.803424
| 0.790108
| 0.790108
| 0.790108
| 0.762207
| 0.720989
| 0
| 0.013492
| 0.195397
| 2,303
| 49
| 75
| 47
| 0.837561
| 0.009987
| 0
| 0.657895
| 0
| 0
| 0.249781
| 0.016681
| 0
| 0
| 0
| 0
| 0.789474
| 1
| 0.026316
| false
| 0.210526
| 0.052632
| 0
| 0.105263
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 11
|
c59ff6269abc6e4442e009258e5c25cd74a0c2dd
| 59,325
|
py
|
Python
|
nipyapi/nifi/apis/parameter_contexts_api.py
|
oneextrafact/nipyapi
|
4c184d69002a8ee3ac528fda63b2ffcc6cedbae5
|
[
"Apache-2.0"
] | null | null | null |
nipyapi/nifi/apis/parameter_contexts_api.py
|
oneextrafact/nipyapi
|
4c184d69002a8ee3ac528fda63b2ffcc6cedbae5
|
[
"Apache-2.0"
] | null | null | null |
nipyapi/nifi/apis/parameter_contexts_api.py
|
oneextrafact/nipyapi
|
4c184d69002a8ee3ac528fda63b2ffcc6cedbae5
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
NiFi Rest Api
The Rest Api provides programmatic access to command and control a NiFi instance in real time. Start and stop processors, monitor queues, query provenance data, and more. Each endpoint below includes a description, definitions of the expected input and output, potential response codes, and the authorizations required to invoke each service.
OpenAPI spec version: 1.10.0
Contact: dev@nifi.apache.org
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ParameterContextsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def create_parameter_context(self, body, **kwargs):
"""
Create a Parameter Context
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_parameter_context(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param ParameterContextEntity body: The Parameter Context. (required)
:return: ParameterContextEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_parameter_context_with_http_info(body, **kwargs)
else:
(data) = self.create_parameter_context_with_http_info(body, **kwargs)
return data
def create_parameter_context_with_http_info(self, body, **kwargs):
"""
Create a Parameter Context
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_parameter_context_with_http_info(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param ParameterContextEntity body: The Parameter Context. (required)
:return: ParameterContextEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_parameter_context" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_parameter_context`")
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/parameter-contexts', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ParameterContextEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
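# Hypothetical usage sketch (not part of the generated client): create a
# Parameter Context and read it back through this API. Construction of the
# ParameterContextEntity body is elided; the entity classes live in the
# generated models package, and the attribute names here are assumptions.
#
#   api = ParameterContextsApi()
#   created = api.create_parameter_context(body=my_context_entity)
#   fetched = api.get_parameter_context(id=created.id)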
def delete_parameter_context(self, id, **kwargs):
"""
Deletes the Parameter Context with the given ID
Deletes the Parameter Context with the given ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_parameter_context(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The Parameter Context ID. (required)
:param str version: The version is used to verify the client is working with the latest version of the flow.
:param str client_id: If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
:param bool disconnected_node_acknowledged: Acknowledges that this node is disconnected to allow for mutable requests to proceed.
:return: ParameterContextEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_parameter_context_with_http_info(id, **kwargs)
else:
(data) = self.delete_parameter_context_with_http_info(id, **kwargs)
return data
def delete_parameter_context_with_http_info(self, id, **kwargs):
"""
Deletes the Parameter Context with the given ID
Deletes the Parameter Context with the given ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_parameter_context_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The Parameter Context ID. (required)
:param str version: The version is used to verify the client is working with the latest version of the flow.
:param str client_id: If the client id is not specified, a new one will be generated. This value (whether specified or generated) is included in the response.
:param bool disconnected_node_acknowledged: Acknowledges that this node is disconnected to allow for mutable requests to proceed.
:return: ParameterContextEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'version', 'client_id', 'disconnected_node_acknowledged']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_parameter_context" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_parameter_context`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
if 'version' in params:
query_params.append(('version', params['version']))
if 'client_id' in params:
query_params.append(('clientId', params['client_id']))
if 'disconnected_node_acknowledged' in params:
query_params.append(('disconnectedNodeAcknowledged', params['disconnected_node_acknowledged']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/parameter-contexts/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ParameterContextEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_update_request(self, context_id, request_id, **kwargs):
"""
Deletes the Update Request with the given ID
Deletes the Update Request with the given ID. After a request is created via a POST to /nifi-api/parameter-contexts/update-requests, it is expected that the client will properly clean up the request by DELETE'ing it, once the Update process has completed. If the request is deleted before the request completes, then the Update request will finish the step that it is currently performing and then will cancel any subsequent steps.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_update_request(context_id, request_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str context_id: The ID of the ParameterContext (required)
:param str request_id: The ID of the Update Request (required)
:param bool disconnected_node_acknowledged: Acknowledges that this node is disconnected to allow for mutable requests to proceed.
:return: ParameterContextUpdateRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_update_request_with_http_info(context_id, request_id, **kwargs)
else:
(data) = self.delete_update_request_with_http_info(context_id, request_id, **kwargs)
return data
def delete_update_request_with_http_info(self, context_id, request_id, **kwargs):
"""
Deletes the Update Request with the given ID
Deletes the Update Request with the given ID. After a request is created via a POST to /nifi-api/parameter-contexts/update-requests, it is expected that the client will properly clean up the request by DELETE'ing it, once the Update process has completed. If the request is deleted before the request completes, then the Update request will finish the step that it is currently performing and then will cancel any subsequent steps.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_update_request_with_http_info(context_id, request_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str context_id: The ID of the ParameterContext (required)
:param str request_id: The ID of the Update Request (required)
:param bool disconnected_node_acknowledged: Acknowledges that this node is disconnected to allow for mutable requests to proceed.
:return: ParameterContextUpdateRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['context_id', 'request_id', 'disconnected_node_acknowledged']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_update_request" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'context_id' is set
if ('context_id' not in params) or (params['context_id'] is None):
raise ValueError("Missing the required parameter `context_id` when calling `delete_update_request`")
# verify the required parameter 'request_id' is set
if ('request_id' not in params) or (params['request_id'] is None):
raise ValueError("Missing the required parameter `request_id` when calling `delete_update_request`")
collection_formats = {}
path_params = {}
if 'context_id' in params:
path_params['contextId'] = params['context_id']
if 'request_id' in params:
path_params['requestId'] = params['request_id']
query_params = []
if 'disconnected_node_acknowledged' in params:
query_params.append(('disconnectedNodeAcknowledged', params['disconnected_node_acknowledged']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/parameter-contexts/{contextId}/update-requests/{requestId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ParameterContextUpdateRequestEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_validation_request(self, context_id, id, **kwargs):
"""
Deletes the Validation Request with the given ID
Deletes the Validation Request with the given ID. After a request is created via a POST to /nifi-api/validation-contexts, it is expected that the client will properly clean up the request by DELETE'ing it, once the validation process has completed. If the request is deleted before the request completes, then the Validation request will finish the step that it is currently performing and then will cancel any subsequent steps.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_validation_request(context_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str context_id: The ID of the Parameter Context (required)
:param str id: The ID of the Update Request (required)
:param bool disconnected_node_acknowledged: Acknowledges that this node is disconnected to allow for mutable requests to proceed.
:return: ParameterContextValidationRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_validation_request_with_http_info(context_id, id, **kwargs)
else:
(data) = self.delete_validation_request_with_http_info(context_id, id, **kwargs)
return data
def delete_validation_request_with_http_info(self, context_id, id, **kwargs):
"""
Deletes the Validation Request with the given ID
Deletes the Validation Request with the given ID. After a request is created via a POST to /nifi-api/validation-contexts, it is expected that the client will properly clean up the request by DELETE'ing it, once the validation process has completed. If the request is deleted before the request completes, then the Validation request will finish the step that it is currently performing and then will cancel any subsequent steps.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_validation_request_with_http_info(context_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str context_id: The ID of the Parameter Context (required)
:param str id: The ID of the Update Request (required)
:param bool disconnected_node_acknowledged: Acknowledges that this node is disconnected to allow for mutable requests to proceed.
:return: ParameterContextValidationRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['context_id', 'id', 'disconnected_node_acknowledged']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_validation_request" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'context_id' is set
if ('context_id' not in params) or (params['context_id'] is None):
raise ValueError("Missing the required parameter `context_id` when calling `delete_validation_request`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_validation_request`")
collection_formats = {}
path_params = {}
if 'context_id' in params:
path_params['contextId'] = params['context_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = []
if 'disconnected_node_acknowledged' in params:
query_params.append(('disconnectedNodeAcknowledged', params['disconnected_node_acknowledged']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/parameter-contexts/{contextId}/validation-requests/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ParameterContextValidationRequestEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_parameter_context(self, id, **kwargs):
"""
Returns the Parameter Context with the given ID
Returns the Parameter Context with the given ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_parameter_context(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The ID of the Parameter Context (required)
:return: ParameterContextEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_parameter_context_with_http_info(id, **kwargs)
else:
(data) = self.get_parameter_context_with_http_info(id, **kwargs)
return data
def get_parameter_context_with_http_info(self, id, **kwargs):
"""
Returns the Parameter Context with the given ID
Returns the Parameter Context with the given ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_parameter_context_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The ID of the Parameter Context (required)
:return: ParameterContextEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_parameter_context" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_parameter_context`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/parameter-contexts/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ParameterContextEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_parameter_context_update(self, context_id, request_id, **kwargs):
"""
Returns the Update Request with the given ID
Returns the Update Request with the given ID. Once an Update Request has been created by performing a POST to /nifi-api/parameter-contexts, that request can subsequently be retrieved via this endpoint, and the request that is fetched will contain the updated state, such as percent complete, the current state of the request, and any failures.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_parameter_context_update(context_id, request_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str context_id: The ID of the Parameter Context (required)
:param str request_id: The ID of the Update Request (required)
:return: ParameterContextUpdateRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_parameter_context_update_with_http_info(context_id, request_id, **kwargs)
else:
(data) = self.get_parameter_context_update_with_http_info(context_id, request_id, **kwargs)
return data
def get_parameter_context_update_with_http_info(self, context_id, request_id, **kwargs):
"""
Returns the Update Request with the given ID
Returns the Update Request with the given ID. Once an Update Request has been created by performing a POST to /nifi-api/parameter-contexts, that request can subsequently be retrieved via this endpoint, and the request that is fetched will contain the updated state, such as percent complete, the current state of the request, and any failures.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_parameter_context_update_with_http_info(context_id, request_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str context_id: The ID of the Parameter Context (required)
:param str request_id: The ID of the Update Request (required)
:return: ParameterContextUpdateRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['context_id', 'request_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_parameter_context_update" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'context_id' is set
if ('context_id' not in params) or (params['context_id'] is None):
raise ValueError("Missing the required parameter `context_id` when calling `get_parameter_context_update`")
# verify the required parameter 'request_id' is set
if ('request_id' not in params) or (params['request_id'] is None):
raise ValueError("Missing the required parameter `request_id` when calling `get_parameter_context_update`")
collection_formats = {}
path_params = {}
if 'context_id' in params:
path_params['contextId'] = params['context_id']
if 'request_id' in params:
path_params['requestId'] = params['request_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/parameter-contexts/{contextId}/update-requests/{requestId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ParameterContextUpdateRequestEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_validation_request(self, context_id, id, **kwargs):
"""
Returns the Validation Request with the given ID
Returns the Validation Request with the given ID. Once a Validation Request has been created by performing a POST to /nifi-api/validation-contexts, that request can subsequently be retrieved via this endpoint, and the request that is fetched will contain the updated state, such as percent complete, the current state of the request, and any failures.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_validation_request(context_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str context_id: The ID of the Parameter Context (required)
:param str id: The ID of the Validation Request (required)
:return: ParameterContextValidationRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_validation_request_with_http_info(context_id, id, **kwargs)
else:
(data) = self.get_validation_request_with_http_info(context_id, id, **kwargs)
return data
def get_validation_request_with_http_info(self, context_id, id, **kwargs):
"""
Returns the Validation Request with the given ID
Returns the Validation Request with the given ID. Once a Validation Request has been created by performing a POST to /nifi-api/validation-contexts, that request can subsequently be retrieved via this endpoint, and the request that is fetched will contain the updated state, such as percent complete, the current state of the request, and any failures.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_validation_request_with_http_info(context_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str context_id: The ID of the Parameter Context (required)
:param str id: The ID of the Validation Request (required)
:return: ParameterContextValidationRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['context_id', 'id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_validation_request" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'context_id' is set
if ('context_id' not in params) or (params['context_id'] is None):
raise ValueError("Missing the required parameter `context_id` when calling `get_validation_request`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_validation_request`")
collection_formats = {}
path_params = {}
if 'context_id' in params:
path_params['contextId'] = params['context_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/parameter-contexts/{contextId}/validation-requests/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ParameterContextValidationRequestEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def submit_parameter_context_update(self, context_id, body, **kwargs):
"""
Initiate the Update Request of a Parameter Context
This will initiate the process of updating a Parameter Context. Changing the value of a Parameter may require that one or more components be stopped and restarted, so this action may take significantly more time than many other REST API actions. As a result, this endpoint will immediately return a ParameterContextUpdateRequestEntity, and the process of updating the necessary components will occur asynchronously in the background. The client may then periodically poll the status of the request by issuing a GET request to /parameter-contexts/update-requests/{requestId}. Once the request is completed, the client is expected to issue a DELETE request to /parameter-contexts/update-requests/{requestId}.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.submit_parameter_context_update(context_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str context_id: (required)
:param ParameterContextEntity body: The updated version of the parameter context. (required)
:return: ParameterContextUpdateRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.submit_parameter_context_update_with_http_info(context_id, body, **kwargs)
else:
(data) = self.submit_parameter_context_update_with_http_info(context_id, body, **kwargs)
return data
def submit_parameter_context_update_with_http_info(self, context_id, body, **kwargs):
"""
Initiate the Update Request of a Parameter Context
This will initiate the process of updating a Parameter Context. Changing the value of a Parameter may require that one or more components be stopped and restarted, so this action may take significantly more time than many other REST API actions. As a result, this endpoint will immediately return a ParameterContextUpdateRequestEntity, and the process of updating the necessary components will occur asynchronously in the background. The client may then periodically poll the status of the request by issuing a GET request to /parameter-contexts/update-requests/{requestId}. Once the request is completed, the client is expected to issue a DELETE request to /parameter-contexts/update-requests/{requestId}.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.submit_parameter_context_update_with_http_info(context_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str context_id: (required)
:param ParameterContextEntity body: The updated version of the parameter context. (required)
:return: ParameterContextUpdateRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['context_id', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method submit_parameter_context_update" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'context_id' is set
if ('context_id' not in params) or (params['context_id'] is None):
raise ValueError("Missing the required parameter `context_id` when calling `submit_parameter_context_update`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `submit_parameter_context_update`")
collection_formats = {}
path_params = {}
if 'context_id' in params:
path_params['contextId'] = params['context_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/parameter-contexts/{contextId}/update-requests', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ParameterContextUpdateRequestEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
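# Hypothetical end-to-end sketch of the asynchronous update workflow the
# docstrings above describe: submit the update request, poll it until it
# reports completion, then delete it. Attribute paths such as
# `request.request.complete` follow the generated models and are assumptions.
#
#   import time
#   request = api.submit_parameter_context_update(context_id, updated_entity)
#   while not request.request.complete:
#       time.sleep(1)
#       request = api.get_parameter_context_update(context_id,
#                                                   request.request.request_id)
#   api.delete_update_request(context_id, request.request.request_id)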
def submit_validation_request(self, context_id, body, **kwargs):
"""
Initiate a Validation Request to determine how the validity of components will change if a Parameter Context were to be updated
This will initiate the process of validating all components whose Process Group is bound to the specified Parameter Context. Performing validation against an arbitrary number of components may be expensive and take significantly more time than many other REST API actions. As a result, this endpoint will immediately return a ParameterContextValidationRequestEntity, and the process of validating the necessary components will occur asynchronously in the background. The client may then periodically poll the status of the request by issuing a GET request to /parameter-contexts/validation-requests/{requestId}. Once the request is completed, the client is expected to issue a DELETE request to /parameter-contexts/validation-requests/{requestId}.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.submit_validation_request(context_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str context_id: (required)
:param ParameterContextValidationRequestEntity body: The validation request (required)
:return: ParameterContextValidationRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.submit_validation_request_with_http_info(context_id, body, **kwargs)
else:
(data) = self.submit_validation_request_with_http_info(context_id, body, **kwargs)
return data
def submit_validation_request_with_http_info(self, context_id, body, **kwargs):
"""
Initiate a Validation Request to determine how the validity of components will change if a Parameter Context were to be updated
This will initiate the process of validating all components whose Process Group is bound to the specified Parameter Context. Performing validation against an arbitrary number of components may be expensive and take significantly more time than many other REST API actions. As a result, this endpoint will immediately return a ParameterContextValidationRequestEntity, and the process of validating the necessary components will occur asynchronously in the background. The client may then periodically poll the status of the request by issuing a GET request to /parameter-contexts/validation-requests/{requestId}. Once the request is completed, the client is expected to issue a DELETE request to /parameter-contexts/validation-requests/{requestId}.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.submit_validation_request_with_http_info(context_id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str context_id: (required)
:param ParameterContextValidationRequestEntity body: The validation request (required)
:return: ParameterContextValidationRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['context_id', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method submit_validation_request" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'context_id' is set
if ('context_id' not in params) or (params['context_id'] is None):
raise ValueError("Missing the required parameter `context_id` when calling `submit_validation_request`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `submit_validation_request`")
collection_formats = {}
path_params = {}
if 'context_id' in params:
path_params['contextId'] = params['context_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/parameter-contexts/{contextId}/validation-requests', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ParameterContextValidationRequestEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_parameter_context(self, id, body, **kwargs):
"""
Modifies a Parameter Context
This endpoint will update a Parameter Context to match the provided entity. However, this request will fail if any component is running and is referencing a Parameter in the Parameter Context. Generally, this endpoint is not called directly. Instead, an update request should be submitted by making a POST to the /parameter-contexts/update-requests endpoint. That endpoint will, in turn, call this endpoint.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_parameter_context(id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: (required)
:param ParameterContextEntity body: The updated Parameter Context (required)
:return: ParameterContextEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_parameter_context_with_http_info(id, body, **kwargs)
else:
(data) = self.update_parameter_context_with_http_info(id, body, **kwargs)
return data
def update_parameter_context_with_http_info(self, id, body, **kwargs):
"""
Modifies a Parameter Context
This endpoint will update a Parameter Context to match the provided entity. However, this request will fail if any component is running and is referencing a Parameter in the Parameter Context. Generally, this endpoint is not called directly. Instead, an update request should be submitted by making a POST to the /parameter-contexts/update-requests endpoint. That endpoint will, in turn, call this endpoint.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_parameter_context_with_http_info(id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: (required)
:param ParameterContextEntity body: The updated Parameter Context (required)
:return: ParameterContextEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_parameter_context" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_parameter_context`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_parameter_context`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/parameter-contexts/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ParameterContextEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
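The docstrings above describe an asynchronous submit/poll/delete cycle for validation requests. A minimal sketch of driving that cycle with this client follows; the get_validation_request and delete_validation_request helpers and the request.request_id / request.complete fields are assumptions made for illustration and do not appear in this excerpt.
import time

def run_validation(api, context_id, request_entity, poll_interval=2):
    # POST /parameter-contexts/{contextId}/validation-requests
    result = api.submit_validation_request(context_id, request_entity)
    request_id = result.request.request_id  # assumed field layout of the returned entity
    try:
        # poll GET /parameter-contexts/validation-requests/{requestId} until finished
        while not result.request.complete:
            time.sleep(poll_interval)
            result = api.get_validation_request(request_id)  # assumed companion method
    finally:
        # the client is expected to DELETE the request once it has completed
        api.delete_validation_request(request_id)  # assumed companion method
    return result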
| 50.661827
| 752
| 0.615306
| 6,414
| 59,325
| 5.501403
| 0.0488
| 0.046704
| 0.01587
| 0.020405
| 0.973842
| 0.968741
| 0.963045
| 0.957264
| 0.954685
| 0.949215
| 0
| 0.000172
| 0.312819
| 59,325
| 1,170
| 753
| 50.705128
| 0.865381
| 0.41748
| 0
| 0.801394
| 0
| 0
| 0.192511
| 0.076789
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036585
| false
| 0
| 0.012195
| 0
| 0.102787
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| a85d28a147c2851074f1bb71e893b4e8755ebe06
| 660
| py
| Python
| aws_lambda_typing/events/config.py
| curekoshimizu/aws-lambda-typing
| ea5649e084f598f02dadad1a60927034ea46ae5d
| ["MIT"]
| null
| null
| null
| aws_lambda_typing/events/config.py
| curekoshimizu/aws-lambda-typing
| ea5649e084f598f02dadad1a60927034ea46ae5d
| ["MIT"]
| null
| null
| null
| aws_lambda_typing/events/config.py
| curekoshimizu/aws-lambda-typing
| ea5649e084f598f02dadad1a60927034ea46ae5d
| ["MIT"]
| null
| null
| null
|
#!/usr/bin/env python
import typing
class ConfigEvent(typing.TypedDict):
"""
ConfigEvent https://docs.aws.amazon.com/lambda/latest/dg/services-config.html
Attributes:
----------
invokingEvent: str
ruleParameters: str
resultToken: str
eventLeftScope: bool
executionRoleArn: str
configRuleArn: str
configRuleName: str
configRuleId: str
accountId: str
version: str
"""
invokingEvent: str
ruleParameters: str
resultToken: str
eventLeftScope: bool
executionRoleArn: str
configRuleArn: str
configRuleName: str
configRuleId: str
accountId: str
version: str
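As a rough usage sketch (not part of the library source above), a Config rule handler could annotate its event parameter with this TypedDict; the handler name is arbitrary, and parsing invokingEvent/ruleParameters with json.loads follows the AWS Config documentation for custom rules.
import json
import typing

def handler(event: ConfigEvent, context: typing.Any) -> None:
    # invokingEvent and ruleParameters are delivered as JSON-encoded strings
    invoking_event = json.loads(event["invokingEvent"])
    rule_parameters = json.loads(event["ruleParameters"]) if event["ruleParameters"] else {}
    print(event["configRuleName"], invoking_event.get("messageType"), rule_parameters)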
| 15.348837
| 77
| 0.660606
| 63
| 660
| 6.920635
| 0.539683
| 0.073395
| 0.137615
| 0.151376
| 0.706422
| 0.706422
| 0.706422
| 0.706422
| 0.706422
| 0.706422
| 0
| 0
| 0.251515
| 660
| 42
| 78
| 15.714286
| 0.882591
| 0.471212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.083333
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 7
| 765266b4bc6c376f8d781ac5b9e414a2c40c4c70
| 6,546
| py
| Python
| parser/team12/src/EXPRESION/EXPRESION_RELACIONAL/Expresion_Relacional.py
| webdev188/tytus
| 847071edb17b218f51bb969d335a8ec093d13f94
| ["MIT"]
| 35
| 2020-12-07T03:11:43.000Z
| 2021-04-15T17:38:16.000Z
| parser/team12/src/EXPRESION/EXPRESION_RELACIONAL/Expresion_Relacional.py
| webdev188/tytus
| 847071edb17b218f51bb969d335a8ec093d13f94
| ["MIT"]
| 47
| 2020-12-09T01:29:09.000Z
| 2021-01-13T05:37:50.000Z
| parser/team12/src/EXPRESION/EXPRESION_RELACIONAL/Expresion_Relacional.py
| webdev188/tytus
| 847071edb17b218f51bb969d335a8ec093d13f94
| ["MIT"]
| 556
| 2020-12-07T03:13:31.000Z
| 2021-06-17T17:41:10.000Z
|
import sys, os.path
import datetime
nodo_dir = (os.path.abspath(os.path.join(os.path.dirname(__file__), '..','..')) + '\\ENTORNO\\')
sys.path.append(nodo_dir)
from Tipo import Data_Type
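# The helpers below evaluate two expression nodes against the current environment and,
# when both operands share a supported type (numeric, character, or boolean), store the
# result of the corresponding relational operator (!=, ==, >, >=, <, <=) in expRes as a
# boolean; on a type mismatch, expRes is marked with Data_Type.error instead.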
# **************************************************************************************************************
def diferente(exp1, exp2, expRes, enviroment):
val1 = exp1.execute(enviroment)
val2 = exp2.execute(enviroment)
if exp1.tipo.data_type == Data_Type.numeric and exp2.tipo.data_type == Data_Type.numeric :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 != val2
elif exp1.tipo.data_type == Data_Type.character and exp2.tipo.data_type == Data_Type.character :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 != val2
elif exp1.tipo.data_type == Data_Type.boolean and exp2.tipo.data_type == Data_Type.boolean :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 != val2
else:
expRes.tipo.data_type = Data_Type.error
expRes.valorExpresion = None
return expRes
# **************************************************************************************************************
# **************************************************************************************************************
def igualdad(exp1, exp2, expRes, enviroment):
val1 = exp1.execute(enviroment)
val2 = exp2.execute(enviroment)
if exp1.tipo.data_type == Data_Type.numeric and exp2.tipo.data_type == Data_Type.numeric :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 == val2
elif exp1.tipo.data_type == Data_Type.character and exp2.tipo.data_type == Data_Type.character :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 == val2
elif exp1.tipo.data_type == Data_Type.boolean and exp2.tipo.data_type == Data_Type.boolean :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 == val2
else:
expRes.tipo.data_type = Data_Type.error
expRes.valorExpresion = None
return expRes
# **************************************************************************************************************
# **************************************************************************************************************
def mayor(exp1, exp2, expRes, enviroment):
val1 = exp1.execute(enviroment)
val2 = exp2.execute(enviroment)
if exp1.tipo.data_type == Data_Type.numeric and exp2.tipo.data_type == Data_Type.numeric :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 > val2
elif exp1.tipo.data_type == Data_Type.character and exp2.tipo.data_type == Data_Type.character :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 > val2
elif exp1.tipo.data_type == Data_Type.boolean and exp2.tipo.data_type == Data_Type.boolean :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 > val2
else:
expRes.tipo.data_type = Data_Type.error
expRes.valorExpresion = None
return expRes
# **************************************************************************************************************
# **************************************************************************************************************
def mayorigual(exp1, exp2, expRes, enviroment):
val1 = exp1.execute(enviroment)
val2 = exp2.execute(enviroment)
if exp1.tipo.data_type == Data_Type.numeric and exp2.tipo.data_type == Data_Type.numeric :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 >= val2
elif exp1.tipo.data_type == Data_Type.character and exp2.tipo.data_type == Data_Type.character :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 >= val2
elif exp1.tipo.data_type == Data_Type.boolean and exp2.tipo.data_type == Data_Type.boolean :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 >= val2
else:
expRes.tipo.data_type = Data_Type.error
expRes.valorExpresion = None
return expRes
# **************************************************************************************************************
# **************************************************************************************************************
def menor(exp1, exp2, expRes, enviroment):
val1 = exp1.execute(enviroment)
val2 = exp2.execute(enviroment)
if exp1.tipo.data_type == Data_Type.numeric and exp2.tipo.data_type == Data_Type.numeric :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 < val2
elif exp1.tipo.data_type == Data_Type.character and exp2.tipo.data_type == Data_Type.character :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 < val2
elif exp1.tipo.data_type == Data_Type.boolean and exp2.tipo.data_type == Data_Type.boolean :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 < val2
else:
expRes.tipo.data_type = Data_Type.error
expRes.valorExpresion = None
return expRes
# **************************************************************************************************************
# **************************************************************************************************************
def menorigual(exp1, exp2, expRes, enviroment):
val1 = exp1.execute(enviroment)
val2 = exp2.execute(enviroment)
if exp1.tipo.data_type == Data_Type.numeric and exp2.tipo.data_type == Data_Type.numeric :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 <= val2
elif exp1.tipo.data_type == Data_Type.character and exp2.tipo.data_type == Data_Type.character :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 <= val2
elif exp1.tipo.data_type == Data_Type.boolean and exp2.tipo.data_type == Data_Type.boolean :
expRes.tipo.data_type = Data_Type.boolean
expRes.valorExpresion = val1 <= val2
else:
expRes.tipo.data_type = Data_Type.error
expRes.valorExpresion = None
return expRes
# **************************************************************************************************************
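A minimal sketch of exercising one of these helpers with stub expression objects; the _Tipo/_Expr stand-ins below are hypothetical and only mirror the attributes the functions actually read (tipo.data_type, valorExpresion, execute).
class _Tipo:
    def __init__(self, data_type):
        self.data_type = data_type

class _Expr:
    def __init__(self, data_type, valor=None):
        self.tipo = _Tipo(data_type)
        self.valorExpresion = valor
    def execute(self, enviroment):
        # a real expression node would evaluate itself against the environment here
        return self.valorExpresion

exp1 = _Expr(Data_Type.numeric, 10)
exp2 = _Expr(Data_Type.numeric, 3)
resultado = mayor(exp1, exp2, _Expr(Data_Type.numeric), None)
print(resultado.valorExpresion)  # True, since 10 > 3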
| 38.505882
| 112
| 0.540636
| 677
| 6,546
| 5.039882
| 0.062038
| 0.283705
| 0.21102
| 0.28136
| 0.94871
| 0.94871
| 0.94871
| 0.94871
| 0.94871
| 0.94871
| 0
| 0.020108
| 0.179499
| 6,546
| 170
| 113
| 38.505882
| 0.615155
| 0.20333
| 0
| 0.891089
| 0
| 0
| 0.002883
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059406
| false
| 0
| 0.029703
| 0
| 0.148515
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
|