hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e6419912deef037c8820d01fc0fe6bb7e090c267 | 75 | py | Python | python/cudf/cudf/_libxx/__init__.py | CZZLEGEND/cudf | 5d2465d6738d00628673fffdc1fac51fad7ef9a7 | [
"Apache-2.0"
] | 1 | 2020-01-14T01:44:35.000Z | 2020-01-14T01:44:35.000Z | python/cudf/cudf/_libxx/__init__.py | CZZLEGEND/cudf | 5d2465d6738d00628673fffdc1fac51fad7ef9a7 | [
"Apache-2.0"
] | null | null | null | python/cudf/cudf/_libxx/__init__.py | CZZLEGEND/cudf | 5d2465d6738d00628673fffdc1fac51fad7ef9a7 | [
"Apache-2.0"
] | null | null | null | from cudf._libxx.gather import gather
from cudf._libxx.table import _Table
| 25 | 37 | 0.84 | 12 | 75 | 5 | 0.5 | 0.266667 | 0.433333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.106667 | 75 | 2 | 38 | 37.5 | 0.895522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
053ca4dd2309bc753394318f7f7e8a821dcbcd13 | 193 | py | Python | end_timing.py | gholamw/DNS-QUIC | c020cbf69b3067eb5862a206a317b48feca798fe | [
"MIT"
] | null | null | null | end_timing.py | gholamw/DNS-QUIC | c020cbf69b3067eb5862a206a317b48feca798fe | [
"MIT"
] | null | null | null | end_timing.py | gholamw/DNS-QUIC | c020cbf69b3067eb5862a206a317b48feca798fe | [
"MIT"
] | null | null | null | import timeit
import start_timing
import start_timing
end = timeit.timeit()
print(";; more precise timing of the query: ", abs(end - start_timing.start))
#print (abs(end - start_timing.start)) | 27.571429 | 77 | 0.756477 | 29 | 193 | 4.896552 | 0.413793 | 0.309859 | 0.239437 | 0.239437 | 0.309859 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.124352 | 193 | 7 | 78 | 27.571429 | 0.840237 | 0.19171 | 0 | 0.4 | 0 | 0 | 0.237179 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.6 | 0 | 0.6 | 0.2 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
05550d7dbf5296d989c79c200fbda0083d722df0 | 10,732 | py | Python | secret_sdk/client/lcd/wallet.py | abduramann/secret-sdk-python | a6b0103569052c9526fc802e8d6ffc22f0a3c001 | [
"MIT"
] | 26 | 2022-01-28T21:19:42.000Z | 2022-03-28T01:56:11.000Z | secret_sdk/client/lcd/wallet.py | abduramann/secret-sdk-python | a6b0103569052c9526fc802e8d6ffc22f0a3c001 | [
"MIT"
] | 6 | 2022-01-29T10:02:18.000Z | 2022-03-17T03:01:40.000Z | secret_sdk/client/lcd/wallet.py | abduramann/secret-sdk-python | a6b0103569052c9526fc802e8d6ffc22f0a3c001 | [
"MIT"
] | 7 | 2022-01-28T19:46:02.000Z | 2022-03-19T15:18:18.000Z | from __future__ import annotations
from typing import Dict, List, Optional
from secret_sdk.core import AccAddress, Coins, Numeric
from secret_sdk.core.auth import StdFee, StdSignMsg, StdTx
from secret_sdk.core.bank import MsgSend
from secret_sdk.core.msg import Msg
from secret_sdk.key.key import Key
__all__ = ["Wallet", "AsyncWallet"]
class AsyncWallet:
def __init__(self, lcd, key: Key):
self.lcd = lcd
self.key = key
async def account_number(self) -> int:
res = await self.lcd.auth.account_info(self.key.acc_address)
return res.account_number
async def sequence(self) -> int:
res = await self.lcd.auth.account_info(self.key.acc_address)
return res.sequence
async def account_number_and_sequence(self) -> dict:
res = await self.lcd.auth.account_info(self.key.acc_address)
return {"account_number": res.account_number, "sequence": res.sequence}
async def create_tx(
self,
msgs: List[Msg],
fee: Optional[StdFee] = None,
memo: str = "",
gas: Optional[int] = None,
gas_prices: Optional[Coins.Input] = None,
gas_adjustment: Optional[Numeric.Input] = None,
fee_denoms: Optional[List[str]] = None,
account_number: Optional[int] = None,
sequence: Optional[int] = None,
) -> StdSignMsg:
return await self.lcd.tx.create(
sender=self.key.acc_address,
msgs=msgs,
fee=fee,
memo=memo,
gas=gas,
gas_prices=gas_prices,
gas_adjustment=gas_adjustment,
fee_denoms=fee_denoms,
account_number=account_number,
sequence=sequence,
)
async def create_and_sign_tx(
self,
msgs: List[Msg],
fee: Optional[StdFee] = None,
memo: str = "",
gas: Optional[int] = None,
gas_prices: Optional[Coins.Input] = None,
gas_adjustment: Optional[Numeric.Input] = None,
fee_denoms: Optional[List[str]] = None,
account_number: Optional[int] = None,
sequence: Optional[int] = None,
) -> StdTx:
tx = await self.create_tx(
msgs,
fee,
memo,
gas,
gas_prices,
gas_adjustment,
fee_denoms,
account_number,
sequence,
)
return self.key.sign_tx(tx)
async def execute_tx(
self,
contract_addr: str,
handle_msg: Dict,
memo: str = "",
transfer_amount: Coins = None,
gas: Optional[int] = None,
gas_prices: Optional[Coins.Input] = None,
gas_adjustment: Optional[Numeric.Input] = None,
fee_denoms: Optional[List[str]] = None,
) -> StdTx:
if gas is None or gas_prices is None:
fee = self.lcd.custom_fees["exec"]
else:
fee = await self.lcd.tx.estimate_fee(
gas, gas_prices, gas_adjustment, fee_denoms
)
execute_msg = await self.lcd.wasm.contract_execute_msg(
self.key.acc_address, contract_addr, handle_msg, transfer_amount
)
signed_tx = await self.create_and_sign_tx([execute_msg], fee=fee, memo=memo)
tx = await self.lcd.tx.broadcast(signed_tx)
return tx
async def send_tokens(
self,
recipient_addr: AccAddress,
memo: str = "",
transfer_amount: Coins = None,
gas: Optional[int] = None,
gas_prices: Optional[Coins.Input] = None,
gas_adjustment: Optional[Numeric.Input] = None,
fee_denoms: Optional[List[str]] = None,
) -> StdTx:
if gas is None or gas_prices is None:
fee = self.lcd.custom_fees["send"]
else:
fee = await self.lcd.tx.estimate_fee(
gas, gas_prices, gas_adjustment, fee_denoms
)
send_msg = MsgSend(self.key.acc_address, recipient_addr, transfer_amount)
signed_tx = await self.create_and_sign_tx([send_msg], fee=fee, memo=memo)
tx = await self.lcd.tx.broadcast(signed_tx)
return tx
class Wallet:
"""Wraps around a :class:`Key` implementation and provides transaction building and
signing functionality. It is recommended to create this object through
:meth:`LCDClient.wallet()<secret_sdk.client.lcd.LCDClient.wallet>`."""
def __init__(self, lcd, key: Key):
self.lcd = lcd
self.key = key
def account_number(self) -> int:
"""Fetches account number for the account associated with the Key."""
res = self.lcd.auth.account_info(self.key.acc_address)
return res.account_number
def sequence(self) -> int:
"""Fetches the sequence number for the account associated with the Key."""
res = self.lcd.auth.account_info(self.key.acc_address)
return res.sequence
def account_number_and_sequence(self) -> dict:
"""Fetches both account and sequence number associated with the Key."""
res = self.lcd.auth.account_info(self.key.acc_address)
return {"account_number": res.account_number, "sequence": res.sequence}
def create_tx(
self,
msgs: List[Msg],
fee: Optional[StdFee] = None,
memo: str = "",
gas: Optional[int] = None,
gas_prices: Optional[Coins.Input] = None,
gas_adjustment: Optional[Numeric.Input] = None,
fee_denoms: Optional[List[str]] = None,
account_number: Optional[int] = None,
sequence: Optional[int] = None,
) -> StdSignMsg:
"""Builds an unsigned transaction object. The ``Wallet`` will first
query the blockchain to fetch the latest ``account`` and ``sequence`` values for the
account corresponding to its Key, unless the they are both provided. If no ``fee``
parameter is set, automatic fee estimation will be used (see `fee_estimation`).
Args:
msgs (List[Msg]): list of messages to include
fee (Optional[StdFee], optional): transaction fee. If ``None``, will be estimated.
See more on `fee estimation`_.
memo (str, optional): optional short string to include with transaction.
gas (Optional[int]) gas
gas_prices (Optional[Coins.Input], optional): gas prices for fee estimation.
gas_adjustment (Optional[Numeric.Input], optional): gas adjustment for fee estimation.
fee_denoms (Optional[List[str]], optional): list of denoms to use for fee after estimation.
account_number (Optional[int], optional): account number (overrides blockchain query if
provided)
sequence (Optional[int], optional): sequence (overrides blockchain qu ery if provided)
Returns:
StdSignMsg: unsigned transaction
"""
return self.lcd.tx.create(
sender=self.key.acc_address,
msgs=msgs,
fee=fee,
memo=memo,
gas=gas,
gas_prices=gas_prices,
gas_adjustment=gas_adjustment,
fee_denoms=fee_denoms,
account_number=account_number,
sequence=sequence,
)
def create_and_sign_tx(
self,
msgs: List[Msg],
fee: Optional[StdFee] = None,
memo: str = "",
gas: Optional[int] = None,
gas_prices: Optional[Coins.Input] = None,
gas_adjustment: Optional[Numeric.Input] = None,
fee_denoms: Optional[List[str]] = None,
account_number: Optional[int] = None,
sequence: Optional[int] = None,
) -> StdTx:
"""Creates and signs a :class:`StdTx` object in a single step. This is the recommended
method for preparing transaction for immediate signing and broadcastring. The transaction
is generated exactly as :meth:`create_tx`.
Args:
msgs (List[Msg]): list of messages to include
fee (Optional[StdFee], optional): transaction fee. If ``None``, will be estimated.
See more on `fee estimation`_.
memo (str, optional): optional short string to include with transaction.
gas (Optional[int]) gas
gas_prices (Optional[Coins.Input], optional): gas prices for fee estimation.
gas_adjustment (Optional[Numeric.Input], optional): gas adjustment for fee estimation.
fee_denoms (Optional[List[str]], optional): list of denoms to use for fee after estimation.
account_number (Optional[int], optional): account number (overrides blockchain query if
provided)
sequence (Optional[int], optional): sequence (overrides blockchain qu ery if provided)
Returns:
StdTx: signed transaction
"""
return self.key.sign_tx(
self.create_tx(
msgs,
fee,
memo,
gas,
gas_prices,
gas_adjustment,
fee_denoms,
account_number,
sequence,
)
)
def execute_tx(
self,
contract_addr: str,
handle_msg: Dict,
memo: str = "",
transfer_amount: Coins = None,
gas: Optional[int] = None,
gas_prices: Optional[Coins.Input] = None,
gas_adjustment: Optional[Numeric.Input] = None,
fee_denoms: Optional[List[str]] = None,
) -> StdTx:
if gas is None or gas_prices is None:
fee = self.lcd.custom_fees["exec"]
else:
fee = self.lcd.tx.estimate_fee(gas, gas_prices, gas_adjustment, fee_denoms)
execute_msg = self.lcd.wasm.contract_execute_msg(
self.key.acc_address, contract_addr, handle_msg, transfer_amount
)
signed_tx = self.create_and_sign_tx([execute_msg], fee=fee, memo=memo)
tx = self.lcd.tx.broadcast(signed_tx)
return tx
def send_tokens(
self,
recipient_addr: AccAddress,
memo: str = "",
transfer_amount: Coins = None,
gas: Optional[int] = None,
gas_prices: Optional[Coins.Input] = None,
gas_adjustment: Optional[Numeric.Input] = None,
fee_denoms: Optional[List[str]] = None,
) -> StdTx:
if gas is None or gas_prices is None:
fee = self.lcd.custom_fees["send"]
else:
fee = self.lcd.tx.estimate_fee(gas, gas_prices, gas_adjustment, fee_denoms)
send_msg = MsgSend(self.key.acc_address, recipient_addr, transfer_amount)
signed_tx = self.create_and_sign_tx([send_msg], fee=fee, memo=memo)
tx = self.lcd.tx.broadcast(signed_tx)
return tx
| 37.393728 | 103 | 0.605758 | 1,282 | 10,732 | 4.907176 | 0.117785 | 0.02893 | 0.03815 | 0.032427 | 0.825624 | 0.813384 | 0.813384 | 0.802257 | 0.802257 | 0.802257 | 0 | 0 | 0.298267 | 10,732 | 286 | 104 | 37.524476 | 0.835347 | 0.237514 | 0 | 0.788732 | 0 | 0 | 0.009794 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042254 | false | 0 | 0.032864 | 0 | 0.150235 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
058d9b1987a8593b3b8c0f8da9bdf27a8a9a14b6 | 19,158 | py | Python | QuantumInformation/chap3_linearalgebra.py | pranay1990/QuantumInformation | 6588e623c3c4839e2b484a5ce57bb9aac9bb458c | [
"Unlicense"
] | null | null | null | QuantumInformation/chap3_linearalgebra.py | pranay1990/QuantumInformation | 6588e623c3c4839e2b484a5ce57bb9aac9bb458c | [
"Unlicense"
] | null | null | null | QuantumInformation/chap3_linearalgebra.py | pranay1990/QuantumInformation | 6588e623c3c4839e2b484a5ce57bb9aac9bb458c | [
"Unlicense"
] | null | null | null | """
Created on Wed Nov 4
@authors: Dr. M. S. Ramkarthik and Dr. Pranay Barkataki
"""
import numpy as np
import math
import cmath
import scipy.linalg.lapack as la
class LinearAlgebra:
def inverse_matrix(self,mat):
""" Calculates the inverse of a matrix
Attributes:
mat : Inverse of the array or matrix to be calculated.
Return: inverse of matrix mat
"""
assert np.linalg.det(mat) != 0, "Determinant of the matrix is zero"
return np.linalg.inv(mat)
def power_smatrix(self,mat1,k,precision=10**(-10)):
"""
It calculates the power of a real symmetric matrix.
Attributes:
mat1 : The matrix or array of which power is to be calculated.
k : value of the power
precision: if the absolute eigenvalues below the precision
value will be considered as zero
Return: k'th Power of symmetric matrix mat1
"""
eigenvalues,eigenvectors,info=la.dsyev(mat1)
flag=0
for i in eigenvalues:
if i < 0.0:
flag=1
if flag==0:
diag=np.zeros([eigenvectors.shape[0],eigenvectors.shape[1]],\
dtype='float64')
else:
diag=np.zeros([eigenvectors.shape[0],eigenvectors.shape[1]],\
dtype='complex_')
for i in range(0,eigenvectors.shape[0]):
if abs(eigenvalues[i]) <= precision:
diag[i,i]=0.0
eigenvalues[i]=0.0
if eigenvalues[i] < 0.0:
diag[i,i]=pow(abs(eigenvalues.item(i)),k)*pow(complex(0,1),2*k)
else:
diag[i,i]=pow(eigenvalues.item(i),k)
diag=np.matmul(np.matmul(eigenvectors,diag),np.transpose(eigenvectors))
return diag
def power_hmatrix(self,mat1,k,precision=10**(-10)):
"""
It calculates the power of a Hermitian matrix.
Attributes:
mat1 : The matrix or array of which power is to be calculated.
k : value of the power
precision: if the absolute eigenvalues below the precision
value will be considered as zero
Return: k'th Power of Hermitian matrix mat1
"""
eigenvalues,eigenvectors,info=la.zheev(mat1)
flag=0
for i in eigenvalues:
if i < 0.0:
flag=1
if flag==0:
diag=np.zeros([eigenvectors.shape[0],eigenvectors.shape[1]],\
dtype='float64')
else:
diag=np.zeros([eigenvectors.shape[0],eigenvectors.shape[1]],\
dtype='complex_')
for i in range(0,eigenvectors.shape[0]):
if abs(eigenvalues[i]) <= precision:
diag[i,i]=0.0
eigenvalues[i]=0.0
if eigenvalues[i] < 0.0:
diag[i,i]=pow(abs(eigenvalues.item(i)),k)*pow(complex(0,1),2*k)
else:
diag[i,i]=pow(eigenvalues.item(i),k)
diag=np.matmul(np.matmul(eigenvectors,diag),np.conjugate(\
np.transpose(eigenvectors)))
return diag
def power_gmatrix(self,mat1,k,precision=10**(-10)):
"""
Calculates the power of a general non-Herimitian matrix
Attributes:
mat1 : The matrix or array of which power is to be calculated.
k : value of the power
precision: if the absolute eigenvalues below the precision
value will be considered as zero
Return: k'th Power of non-Hermitian matrix mat1
"""
eigenvalues,eigenvectors=np.linalg.eig(mat1)
diag=np.zeros([eigenvectors.shape[0],eigenvectors.shape[1]],\
dtype=np.complex_)
for i in range(0,eigenvectors.shape[0]):
if abs(eigenvalues[i]) <= precision:
diag[i,i]=complex(0.0,0.0)
else:
diag[i,i]=pow(eigenvalues.item(i),k)
diag=np.matmul(np.matmul(eigenvectors,diag),np.linalg.inv(eigenvectors))
return diag
def function_smatrix(self, mat1, mode="exp",log_base=2):
"""
It calculates the function of a real symmetric matrix.
Attributes:
mat1 : The symmetric matrix of which function is to be calculated.
mode: Primarily calculates the following,
mode='exp': Exponential of a matrix. It is the default mode.
mode='sin': sine of a matrix.
mode='cos': cosine of matrix.
mode='tan': tan of matrix.
mode='log': Logarithm of a matrix, by default log base 2.
log_base: base of the log function
Return: Function of symmetric matrix mat1
"""
assert np.allclose(mat1, np.matrix.transpose(mat1))==True,\
"The matrix entered is not a symmetric matrix"
assert mat1.shape[0] == mat1.shape[1],\
"Entered matrix is not a square matrix"
if mode not in ["exp","sin","cos","tan","log"]:
raise Exception(f"Sorry, the entered mode {mode} is not available")
eigenvalues,eigenvectors,info=la.dsyev(mat1)
if mode == 'exp':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=float)
for i in range(0,diagonal.shape[0]):
diagonal[i,i] = math.exp(eigenvalues[i])
if mode == 'sin':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=float)
for i in range(0,diagonal.shape[0]):
diagonal[i,i] = math.sin(eigenvalues[i])
if mode == 'cos':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=float)
for i in range(0,diagonal.shape[0]):
diagonal[i,i] = math.cos(eigenvalues[i])
if mode == 'tan':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=float)
for i in range(0,diagonal.shape[0]):
diagonal[i,i] = math.tan(eigenvalues[i])
if mode == 'log':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=float)
for i in range(0,diagonal.shape[0]):
assert eigenvalues[i] > 0.0,\
"eigenvalues of the matrix are negative or zero"
diagonal[i,i] = math.log(eigenvalues[i],log_base)
return np.matmul(np.matmul(eigenvectors,diagonal),\
np.matrix.transpose(eigenvectors))
def function_hmatrix(self, mat1, mode="exp",log_base=2):
"""
It calculates the function of hermitian matrix.
Attributes:
mat1 : The Hermitian matrix of which function is to be calculated.
mode: Primarily calculates the following,
mode='exp': Exponential of a matrix. It is the default mode.
mode='sin': sine of a matrix.
mode='cos': cosine of matrix.
mode='tan': tan of matrix.
mode='log': Logarithm of a matrix, by default log base 2.
log_base: base of the log function
Return: Function of Hermitian matrix mat1
"""
assert np.allclose(mat1, np.transpose(np.conjugate(mat1)))==True \
,"The matrix entered is not a hermitian matrix"
assert mat1.shape[0] == mat1.shape[1],\
"Entered matrix is not a square matrix"
if mode not in ["exp","sin","cos","tan","log"]:
raise Exception(f"Sorry, the entered mode {mode} is not available")
eigenvalues,eigenvectors,info=la.zheev(mat1)
if mode == 'exp':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=float)
for i in range(0,diagonal.shape[0]):
diagonal[i,i] = math.exp(eigenvalues[i])
if mode == 'sin':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=float)
for i in range(0,diagonal.shape[0]):
diagonal[i,i] = math.sin(eigenvalues[i])
if mode == 'cos':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=float)
for i in range(0,diagonal.shape[0]):
diagonal[i,i] = math.cos(eigenvalues[i])
if mode == 'tan':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=float)
for i in range(0,diagonal.shape[0]):
diagonal[i,i] = math.tan(eigenvalues[i])
if mode == 'log':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=float)
for i in range(0,diagonal.shape[0]):
assert eigenvalues[i] > 0.0, "eigenvalues of the matrix are negative"
diagonal[i,i] = math.log(eigenvalues[i],log_base)
return np.matmul(np.matmul(eigenvectors,diagonal),\
np.transpose(np.conjugate(eigenvectors)))
def function_gmatrix(self, mat1, mode="exp",log_base=2):
"""
It calculates the function of general diagonalizable matrix.
Attributes:
mat1: The general matrix of which function is to be calculated.
mode: Primarily calculates the following,
Primarily calculates the following,
mode='exp': Exponential of a matrix.
mode='sin': sine of a matrix.
mode='cos': cosine of matrix.
mode='tan': tan of matrix.
mode='log': Logarithm of a matrix, by default log base 2.
Return: Function of general matrix mat1
"""
assert mat1.shape[0] == mat1.shape[1],\
"Entered matrix is not a square matrix"
if mode not in ["exp","sin","cos","tan","log"]:
raise Exception(f"Sorry, the entered mode {mode} is not available")
eigenvalues,eigenvectors=np.linalg.eig(mat1)
print(eigenvalues)
if mode == 'exp':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=complex)
for i in range(0,diagonal.shape[0]):
diagonal[i,i] = cmath.exp(eigenvalues[i])
if mode == 'sin':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=complex)
for i in range(0,diagonal.shape[0]):
diagonal[i,i] = cmath.sin(eigenvalues[i])
if mode == 'cos':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=complex)
for i in range(0,diagonal.shape[0]):
diagonal[i,i] = cmath.cos(eigenvalues[i])
if mode == 'tan':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=complex)
for i in range(0,diagonal.shape[0]):
diagonal[i,i] = cmath.tan(eigenvalues[i])
if mode == 'log':
diagonal=np.zeros((mat1.shape[0],mat1.shape[1]),dtype=complex)
for i in range(0,diagonal.shape[0]):
diagonal[i,i] = cmath.log(eigenvalues[i],log_base)
assert np.linalg.det(eigenvectors) != 0, "Determinant of eigenvectors \
matrix is zero"
return np.matmul(np.matmul(eigenvectors,diagonal),\
np.linalg.inv(eigenvectors))
def trace_norm_rmatrix(self,mat1, precision=10**(-13)):
"""
Calculates the trace norm of a real matrix
Attributes:
mat1 : The matrix or array of which trace norm is to be calculated.
precision: the absolute value of the eigenvalues below precision
value will be considered as zero
Return:
trace_norm: trace norm of matrix mat1
"""
eigenvalues,eigenvectors,info=la.dsyev(np.matmul(np.transpose(mat1),\
mat1))
trace_norm=0.0
for i in range(len(eigenvalues)):
if abs(eigenvalues[i]) < precision:
eigenvalues[i]=0.0
trace_norm=trace_norm+np.sqrt(eigenvalues[i])
return trace_norm
def trace_norm_cmatrix(self,mat1, precision=10**(-13)):
"""
Calculates the trace norm of a complex matrix
Attributes:
mat1 : The matrix or array of which trace norm is to be calculated.
precision: the absolute value of the eigenvalues below precision
value will be considered as zero.
Return:
trace_norm: trace norm of matrix mat1
"""
eigenvalues,eigenvectors,info=\
la.zheev(np.matmul(np.conjugate(np.transpose(mat1)),mat1))
trace_norm=0.0
for i in range(len(eigenvalues)):
if abs(eigenvalues[i]) < precision:
eigenvalues[i]=0.0
trace_norm=trace_norm+np.sqrt(eigenvalues[i])
return trace_norm
def hilbert_schmidt_norm_rmatrix(self,mat1, precision=10**(-13)):
"""
Calculates the Hilbert-Schmidt norm of matrix of a real matrix
Attributes:
mat1 : The matrix or array of which Hilbert-Schmidt norm
is to be calculated.
precision: tolerance value, the magnitude of eigenvalues below
precision is considered zero
Return:
htrace_norm: Hilbert-Schmidt norm of matrix mat1.
"""
eigenvalues,eigenvectors,info=la.dsyev(np.matmul(np.transpose(mat1),\
mat1))
htrace_norm=0.0
for i in range(len(eigenvalues)):
if abs(eigenvalues[i]) < precision:
eigenvalues[i]=0.0
htrace_norm=htrace_norm+eigenvalues[i]
htrace_norm=np.sqrt(htrace_norm)
return htrace_norm
def hilbert_schmidt_norm_cmatrix(self,mat1, precision=10**(-13)):
"""
Calculates the trace norm of a complex matrix
Attributes:
mat1 : The matrix or array of which Hilbert-Schmidt norm
is to be calculated.
precision: tolerance value, the magnitude of eigenvalues below
precision is considered zero.
Return:
htrace_norm: Hilbert-Schmidt norm of matrix mat1.
"""
eigenvalues,eigenvectors,info=\
la.zheev(np.matmul(np.conjugate(np.transpose(mat1)),mat1))
htrace_norm=0.0
for i in range(len(eigenvalues)):
if abs(eigenvalues[i]) < precision:
eigenvalues[i]=0.0
htrace_norm=htrace_norm+eigenvalues[i]
htrace_norm=np.sqrt(htrace_norm)
return htrace_norm
def absolute_value_rmatrix(self,mat1):
"""
Calculates the absolute value of a real matrix
Attributes:
mat1 : The matrix of which absolute form has to calculated.
Return:
res_mat: Absoulte value of matrix mat1
"""
res_mat=self.power_smatrix(np.matmul(np.transpose(mat1),\
mat1),0.50)
return res_mat
def absolute_value_cmatrix(self,mat1):
"""
Calculates the absolute value of a complex matrix
Attributes:
mat1 : The matrix of which absolute form has to calculated.
Return:
res_mat: Absoulte value of matrix mat1
"""
res_mat=self.power_hmatrix(np.matmul(np.conjugate(np.transpose(mat1)),\
mat1),0.50)
return res_mat
def hilbert_schmidt_inner_product(self,A,B):
"""
Calculates the Hilbert-Schmidt inner product between matrices.
Attributes:
A: It is a complex or real input matrix.
B: It is a complex or real input matrix.
Return: Hilbert-Schmidt inner product between A and B.
"""
return np.trace(np.matmul(np.conjugate(np.transpose(A)),B))
def gram_schmidt_ortho_rmatrix(self,vectors):
"""
Orthornormal set of real vectors are calculated
Attributes:
vectors: A matrix whose columns are non-orthogonal set real vectors
Return:
orthonormal_vec: A matrix whose columns are orthonormal to each other
"""
orthonormal_vec=np.zeros((vectors.shape[0],vectors.shape[1]),
dtype='float64')
for col in range(0,vectors.shape[1]):
if col != 0:
for col2 in range(0,col):
tr=0.0
for row2 in range(0,vectors.shape[0]):
tr=tr+(orthonormal_vec[row2,col2]*vectors[row2,col])
orthonormal_vec[:,col]=orthonormal_vec[:,col]+\
(tr*orthonormal_vec[:,col2])
orthonormal_vec[:,col]=vectors[:,col]-orthonormal_vec[:,col]
if col == 0:
orthonormal_vec[:,col]=vectors[:,col]
tr=0.0
for row in range(0,vectors.shape[0]):
tr=tr+(orthonormal_vec[row,col]*orthonormal_vec[row,col])
orthonormal_vec[:,col]=orthonormal_vec[:,col]/np.sqrt(tr)
return orthonormal_vec
def gram_schmidt_ortho_cmatrix(self,vectors):
"""
Orthornormal set of complex vectors are calculated
Attributes:
vectors: A matrix whose columns are non-orthogonal set
complex vectors
Return:
orthonormal_vec: A matrix whose columns are orthonormal to each other
"""
orthonormal_vec=np.zeros((vectors.shape[0],vectors.shape[1]),\
dtype=np.complex_)
for col in range(0,vectors.shape[1]):
if col != 0:
orthonormal_vec[:,col]=vectors[:,col].copy()
for col2 in range(0,col):
tr=complex(0.0,0.0)
for row2 in range(0,vectors.shape[0]):
tr=tr+(np.conjugate(orthonormal_vec[row2,col2])\
*vectors[row2,col])
orthonormal_vec[:,col]=orthonormal_vec[:,col]-\
(tr*\
orthonormal_vec[:,col2].copy())
if col == 0:
orthonormal_vec[:,col]=vectors[:,col].copy()
tr=complex(0.0,0.0)
for row in range(0,vectors.shape[0]):
tr=tr+(np.conjugate(orthonormal_vec[row,col])*\
orthonormal_vec[row,col])
orthonormal_vec[:,col]=orthonormal_vec[:,col]/np.sqrt(tr.real)
return orthonormal_vec
| 41.557484 | 85 | 0.538 | 2,297 | 19,158 | 4.443187 | 0.07401 | 0.027631 | 0.02038 | 0.023712 | 0.891828 | 0.856555 | 0.843425 | 0.814815 | 0.780619 | 0.76847 | 0 | 0.02619 | 0.356248 | 19,158 | 460 | 86 | 41.647826 | 0.801346 | 0.258795 | 0 | 0.732794 | 0 | 0 | 0.045514 | 0 | 0 | 0 | 0 | 0 | 0.036437 | 1 | 0.064777 | false | 0 | 0.016194 | 0 | 0.149798 | 0.004049 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0594b004794177d3befa045a87e524ad9525fd99 | 171 | py | Python | src/entity/dataCenter.py | Lukeeeeee/DataCenterJobSchedulingSolution | 9c62c0039b2dd9e0a1ca5474dc46c8be98a972b3 | [
"MIT"
] | null | null | null | src/entity/dataCenter.py | Lukeeeeee/DataCenterJobSchedulingSolution | 9c62c0039b2dd9e0a1ca5474dc46c8be98a972b3 | [
"MIT"
] | null | null | null | src/entity/dataCenter.py | Lukeeeeee/DataCenterJobSchedulingSolution | 9c62c0039b2dd9e0a1ca5474dc46c8be98a972b3 | [
"MIT"
] | null | null | null | class DataCenter(object):
def __init__(self, history_data):
self.heat_list = history_data['HEAT']
def get_heat(self, t):
return self.heat_list[t]
| 24.428571 | 45 | 0.660819 | 24 | 171 | 4.333333 | 0.541667 | 0.211538 | 0.230769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.222222 | 171 | 6 | 46 | 28.5 | 0.781955 | 0 | 0 | 0 | 0 | 0 | 0.023392 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0 | 0 | 0.2 | 0.8 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
559755a0b12cf4d202cd1510d92d84ece0566171 | 31,952 | py | Python | users/tests/test_account_views.py | andywar65/rp_repo | 726c1426d738b962cabeabd8995aa35767df0c41 | [
"BSD-2-Clause"
] | null | null | null | users/tests/test_account_views.py | andywar65/rp_repo | 726c1426d738b962cabeabd8995aa35767df0c41 | [
"BSD-2-Clause"
] | null | null | null | users/tests/test_account_views.py | andywar65/rp_repo | 726c1426d738b962cabeabd8995aa35767df0c41 | [
"BSD-2-Clause"
] | null | null | null | from datetime import date, timedelta
from django.test import TestCase
from django.urls import reverse
from users.models import User, CourseSchedule
class AccountViewTest(TestCase):
    """Integration tests for the account/profile views.

    Covers: login gating (302 redirects to the login page), 404 handling for
    wrong ids / wrong parents / unqualified users, template selection per
    profile sector, and the POST -> redirect flow of every profile-editing
    view (registry, course, address, no_course, child management, release).
    """

    @classmethod
    def setUpTestData(cls):
        # Set up non-modified objects used by all test methods.
        # user_0: trusted, sector '0-NO' -- expected to 404 on profile_add_child.
        user0 = User.objects.create_user(username='user_0',
            password='P4s5W0r6')
        profile = user0.profile
        profile.sector = '0-NO'
        profile.is_trusted = True
        profile.save()
        # untrusty: right sector but is_trusted=False -- also 404s on add_child.
        untrusty = User.objects.create_user(username='untrusty',
            password='P4s5W0r6')
        profile = untrusty.profile
        profile.sector = '3-FI'
        profile.is_trusted = False
        profile.save()
        # uncomplete: trusted but (unlike trustyparent) has no fiscal_code/email
        # set -- presumably what makes add_child return 404; verify in the view.
        uncomplete = User.objects.create_user(username='uncomplete',
            password='P4s5W0r6')
        profile = uncomplete.profile
        profile.sector = '3-FI'
        profile.is_trusted = True
        profile.save()
        # trustyparent: fully qualified account that may manage child profiles.
        trustyparent = User.objects.create_user(username='trustyparent',
            password='P4s5W0r6', first_name='Trusty', email='trusty@example.com')
        profile = trustyparent.profile
        profile.sector = '3-FI'
        profile.is_trusted = True
        profile.fiscal_code = 'GRRNDR65D13F839E'
        profile.save()
        # trustychild: ~15 years old (365*15 days), linked to trustyparent.
        trustychild = User.objects.create_user(username='trustychild',
            password='P4s5W0r6', first_name='Child', last_name='Trusty')
        profile = trustychild.profile
        profile.parent = trustyparent
        profile.sector = '1-YC'
        profile.date_of_birth = date.today() - timedelta(days=365*15)
        profile.save()
        # adultchild: ~19 years old -- old enough for the release view (see
        # test_profile_release_view_404_not_adult vs _status_code below).
        adultchild = User.objects.create_user(username='adultchild',
            password='P4s5W0r6', first_name='Adult', last_name='Trusty')
        profile = adultchild.profile
        profile.parent = trustyparent
        profile.sector = '1-YC'
        profile.date_of_birth = date.today() - timedelta(days=365*19)
        profile.save()
        # user_1 / user_2: plain accounts used for sector-based template tests.
        user1 = User.objects.create_user(username='user_1',
            password='P4s5W0r6')
        profile = user1.profile
        profile.sector = '1-YC'
        profile.save()
        user2 = User.objects.create_user(username='user_2',
            password='P4s5W0r6', email='user_two@example.com')
        profile = user2.profile
        profile.sector = '2-NC'
        profile.save()
        course = CourseSchedule.objects.create(abbrev='ALT', full='Altro')

    # --- profile_add_child view ---
    def test_profile_add_child_view_404_sector_0(self):
        self.client.post('/accounts/login/', {'username':'user_0',
            'password':'P4s5W0r6'})
        response = self.client.get(reverse('profile_add_child'))
        self.assertEqual(response.status_code, 404 )

    def test_profile_add_child_view_404_untrusted(self):
        self.client.post('/accounts/login/', {'username':'untrusty',
            'password':'P4s5W0r6'})
        response = self.client.get(reverse('profile_add_child'))
        self.assertEqual(response.status_code, 404 )

    def test_profile_add_child_view_404_uncomplete(self):
        self.client.post('/accounts/login/', {'username':'uncomplete',
            'password':'P4s5W0r6'})
        response = self.client.get(reverse('profile_add_child'))
        self.assertEqual(response.status_code, 404 )

    def test_profile_add_child_view_status_code(self):
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(reverse('profile_add_child'))
        self.assertEqual(response.status_code, 200 )

    def test_profile_add_child_view_template(self):
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(reverse('profile_add_child'))
        self.assertTemplateUsed(response, 'users/profile_add_child.html' )

    def test_profile_add_child_view_post_status_code(self):
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.post(reverse('profile_add_child'),
            {'first_name': 'Child', 'last_name': 'Trusty'})
        self.assertEqual(response.status_code, 302 )

    def test_profile_add_child_view_post_redirects(self):
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.post(reverse('profile_add_child'),
            {'first_name': 'Child', 'last_name': 'Trusty'})
        self.assertRedirects(response, '/accounts/profile/?child_created=True' )

    # --- template account view: template is selected by profile sector ---
    def test_template_account_view_status_code_sector_1(self):
        self.client.post('/accounts/login/', {'username':'user_1',
            'password':'P4s5W0r6'})
        response = self.client.get(reverse('profile'))
        self.assertEqual(response.status_code, 200 )

    def test_template_account_view_template_sector_1(self):
        self.client.post('/accounts/login/', {'username':'user_1',
            'password':'P4s5W0r6'})
        response = self.client.get(reverse('profile'))
        self.assertTemplateUsed(response, 'users/account_1.html' )

    def test_template_account_view_status_code_sector_2(self):
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(reverse('profile'))
        self.assertEqual(response.status_code, 200 )

    def test_template_account_view_template_sector_2(self):
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(reverse('profile'))
        self.assertTemplateUsed(response, 'users/account_2.html' )

    # --- profile change view ---
    def test_profile_change_view_not_logged(self):
        user = User.objects.get(username='user_2')
        response = self.client.get(reverse('profile_change',
            kwargs={'pk': user.id}))
        self.assertEqual(response.status_code, 302 )

    def test_profile_change_view_not_logged_redirects(self):
        user = User.objects.get(username='user_2')
        response = self.client.get(reverse('profile_change',
            kwargs={'pk': user.id}))
        self.assertRedirects(response,
            f'/accounts/login/?next=/accounts/profile/{user.id}/change/' )

    def test_profile_change_view_404_wrong_id(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(reverse('profile_change', kwargs={'pk': 404}))
        self.assertEqual(response.status_code, 404 )

    def test_profile_change_view_status_code(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(reverse('profile_change',
            kwargs={'pk': user.id}))
        self.assertEqual(response.status_code, 200 )

    def test_profile_change_view_template(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(reverse('profile_change',
            kwargs={'pk': user.id}))
        self.assertTemplateUsed(response, 'users/profile_change.html' )

    def test_profile_change_view_post_status_code(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        # enter all required fields!
        response = self.client.post(reverse('profile_change',
            kwargs={'pk': user.id}), {'sector': '2-NC', 'first_name': 'User',
            'last_name': 'Two', 'email': 'user_2@example.com'})
        self.assertEqual(response.status_code, 302 )

    def test_profile_change_view_post_redirects(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.post(reverse('profile_change',
            kwargs={'pk': user.id}), {'sector': '2-NC', 'first_name': 'User',
            'last_name': 'Two', 'email': 'user_2@example.com'})
        self.assertRedirects(response,
            '/accounts/profile/?submitted=User%20Two' )

    # --- profile change view for a child (?parent=<id> query string) ---
    def test_profile_change_view_child_not_logged(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        response = self.client.get(f'/accounts/profile/{child.id}/change/?parent={parent.id}')
        self.assertEqual(response.status_code, 302 )

    def test_profile_change_view_child_not_logged_redirects(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        response = self.client.get(f'/accounts/profile/{child.id}/change/?parent={parent.id}')
        self.assertRedirects(response,
            f'/accounts/login/?next=/accounts/profile/{child.id}/change/?parent={parent.id}' )

    def test_profile_change_view_child_404_wrong_parent(self):
        # user_2 is not trustychild's parent, so the view must 404.
        parent = User.objects.get(username='user_2')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/change/?parent={parent.id}')
        self.assertEqual(response.status_code, 404 )

    def test_profile_change_view_child_404_wrong_child(self):
        parent = User.objects.get(username='trustyparent')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/404/change/?parent={parent.id}')
        self.assertEqual(response.status_code, 404 )

    def test_profile_change_view_child_status_code(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/change/?parent={parent.id}')
        self.assertEqual(response.status_code, 200 )

    def test_profile_change_view_child_template(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/change/?parent={parent.id}')
        self.assertTemplateUsed(response, 'users/profile_change_child.html' )

    def test_profile_change_view_child_post_status_code(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.post(f'/accounts/profile/{child.id}/change/?parent={parent.id}',
            {'first_name': child.first_name, 'last_name': child.last_name,
            'email': parent.email})
        self.assertEqual(response.status_code, 302 )

    def test_profile_change_view_child_post_redirects(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.post(f'/accounts/profile/{child.id}/change/?parent={parent.id}',
            {'first_name': child.first_name, 'last_name': child.last_name,
            'email': parent.email})
        self.assertRedirects(response,
            f'/accounts/profile/?submitted={child.first_name}%20{child.last_name}' )

    # --- profile change registry view ---
    def test_profile_change_registry_not_logged(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        response = self.client.get(f'/accounts/profile/{child.id}/change/registry/?parent={parent.id}')
        self.assertEqual(response.status_code, 302 )

    def test_profile_change_registry_not_logged_redirects(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        response = self.client.get(f'/accounts/profile/{child.id}/change/registry/?parent={parent.id}')
        self.assertRedirects(response,
            f'/accounts/login/?next=/accounts/profile/{child.id}/change/registry/?parent={parent.id}' )

    def test_profile_change_registry_404_wrong_parent(self):
        parent = User.objects.get(username='user_2')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/change/registry/?parent={parent.id}')
        self.assertEqual(response.status_code, 404 )

    def test_profile_change_registry_404_wrong_id(self):
        parent = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get('/accounts/profile/404/change/registry/')
        self.assertEqual(response.status_code, 404 )

    def test_profile_change_registry_404_wrong_child(self):
        parent = User.objects.get(username='trustyparent')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/404/change/registry/?parent={parent.id}')
        self.assertEqual(response.status_code, 404 )

    def test_profile_change_registry_status_code(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/change/registry/?parent={parent.id}')
        self.assertEqual(response.status_code, 200 )

    def test_profile_change_registry_template(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/change/registry/?parent={parent.id}')
        self.assertTemplateUsed(response, 'users/profile_change_registry.html' )

    def test_profile_change_registry_post_status_code(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.post(f'/accounts/profile/{child.id}/change/registry/?parent={parent.id}',
            {'gender': 'F', 'date_of_birth_day': '4',
            'date_of_birth_month': '6', 'date_of_birth_year': '1999',
            'place_of_birth': 'Roma', 'nationality': 'Italiana',
            'fiscal_code': 'GRRNNA99H44H501X'})
        self.assertEqual(response.status_code, 302 )

    def test_profile_change_registry_post_redirects(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.post(f'/accounts/profile/{child.id}/change/registry/?parent={parent.id}',
            {'gender': 'F', 'date_of_birth_day': '4',
            'date_of_birth_month': '6', 'date_of_birth_year': '1999',
            'place_of_birth': 'Roma', 'nationality': 'Italiana',
            'fiscal_code': 'GRRNNA99H44H501X'})
        self.assertRedirects(response,
            f'/accounts/profile/?submitted={child.first_name}%20{child.last_name}' )

    # --- profile change course view ---
    def test_profile_change_course_not_logged(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        response = self.client.get(f'/accounts/profile/{child.id}/change/course/?parent={parent.id}')
        self.assertEqual(response.status_code, 302 )

    def test_profile_change_course_not_logged_redirects(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        response = self.client.get(f'/accounts/profile/{child.id}/change/course/?parent={parent.id}')
        self.assertRedirects(response,
            f'/accounts/login/?next=/accounts/profile/{child.id}/change/course/?parent={parent.id}' )

    def test_profile_change_course_404_wrong_parent(self):
        parent = User.objects.get(username='user_2')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/change/course/?parent={parent.id}')
        self.assertEqual(response.status_code, 404 )

    def test_profile_change_course_404_wrong_id(self):
        parent = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get('/accounts/profile/404/change/course/')
        self.assertEqual(response.status_code, 404 )

    def test_profile_change_course_404_wrong_child(self):
        parent = User.objects.get(username='trustyparent')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/404/change/course/?parent={parent.id}')
        self.assertEqual(response.status_code, 404 )

    def test_profile_change_course_status_code(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/change/course/?parent={parent.id}')
        self.assertEqual(response.status_code, 200 )

    def test_profile_change_course_template(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/change/course/?parent={parent.id}')
        self.assertTemplateUsed(response, 'users/profile_change_course.html' )

    def test_profile_change_course_post_status_code(self):
        course = CourseSchedule.objects.get(abbrev='ALT')
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.post(f'/accounts/profile/{child.id}/change/course/?parent={parent.id}',
            {'course': [course.id], 'course_alt': 'Foo',
            'course_membership': 'INTU'})
        self.assertEqual(response.status_code, 302 )

    def test_profile_change_course_post_redirects(self):
        course = CourseSchedule.objects.get(abbrev='ALT')
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.post(f'/accounts/profile/{child.id}/change/course/?parent={parent.id}',
            {'course': [course.id], 'course_alt': 'Foo',
            'course_membership': 'INTU'})
        self.assertRedirects(response,
            f'/accounts/profile/?submitted={child.first_name}%20{child.last_name}' )

    # --- profile change address view ---
    def test_profile_change_address_not_logged(self):
        user = User.objects.get(username='user_2')
        response = self.client.get(f'/accounts/profile/{user.id}/change/address/')
        self.assertEqual(response.status_code, 302 )

    def test_profile_change_address_404_wrong_id(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get('/accounts/profile/404/change/address/')
        self.assertEqual(response.status_code, 404 )

    def test_profile_change_address_status_code(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{user.id}/change/address/')
        self.assertEqual(response.status_code, 200 )

    def test_profile_change_address_template(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{user.id}/change/address/')
        self.assertTemplateUsed(response, 'users/profile_change_address.html' )

    def test_profile_change_address_post_status_code(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.post(f'/accounts/profile/{user.id}/change/address/',
            {'fiscal_code': 'GRRNDR65D13F839E', 'address': 'Where?',
            'phone': '123456789'})
        self.assertEqual(response.status_code, 302 )

    def test_profile_change_address_post_redirects(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.post(f'/accounts/profile/{user.id}/change/address/',
            {'fiscal_code': 'GRRNDR65D13F839E', 'address': 'Where?',
            'phone': '123456789'})
        self.assertRedirects(response,
            f'/accounts/profile/?submitted={user.username}' )

    # --- profile change no_course view ---
    def test_profile_change_no_course_not_logged(self):
        user = User.objects.get(username='user_2')
        response = self.client.get(f'/accounts/profile/{user.id}/change/no_course/')
        self.assertEqual(response.status_code, 302 )

    def test_profile_change_no_course_404_wrong_id(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get('/accounts/profile/404/change/no_course/')
        self.assertEqual(response.status_code, 404 )

    def test_profile_change_no_course_status_code(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{user.id}/change/no_course/')
        self.assertEqual(response.status_code, 200 )

    def test_profile_change_no_course_template(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{user.id}/change/no_course/')
        self.assertTemplateUsed(response, 'users/profile_change_no_course.html' )

    def test_profile_change_no_course_post_status_code(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.post(f'/accounts/profile/{user.id}/change/no_course/',
            {'no_course_membership': 'FID'})
        self.assertEqual(response.status_code, 302 )

    def test_profile_change_no_course_post_redirects(self):
        user = User.objects.get(username='user_2')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.post(f'/accounts/profile/{user.id}/change/no_course/',
            {'no_course_membership': 'FID'})
        self.assertRedirects(response,
            f'/accounts/profile/?submitted={user.username}' )

    # --- profile delete child view ---
    def test_profile_delete_child_view_not_logged(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        response = self.client.get(f'/accounts/profile/{child.id}/delete_child/')
        self.assertEqual(response.status_code, 302 )

    def test_profile_delete_child_view_not_logged_redirects(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        response = self.client.get(f'/accounts/profile/{child.id}/delete_child/')
        self.assertRedirects(response,
            f'/accounts/login/?next=/accounts/profile/{child.id}/delete_child/' )

    def test_profile_delete_child_view_404_wrong_parent(self):
        parent = User.objects.get(username='user_2')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/delete_child/')
        self.assertEqual(response.status_code, 404 )

    def test_profile_delete_child_view_404_wrong_child(self):
        parent = User.objects.get(username='trustyparent')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/404/delete_child/')
        self.assertEqual(response.status_code, 404 )

    def test_profile_delete_child_view_status_code(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/delete_child/')
        self.assertEqual(response.status_code, 200 )

    def test_profile_delete_child_view_template(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/delete_child/')
        self.assertTemplateUsed(response, 'users/profile_delete_child.html' )

    def test_profile_delete_child_view_post_status_code(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.post(f'/accounts/profile/{child.id}/delete_child/',
            {'delete': True})
        self.assertEqual(response.status_code, 302 )

    def test_profile_delete_child_view_post_redirects(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        # follow=True so assertRedirects can verify the final destination.
        response = self.client.post(f'/accounts/profile/{child.id}/delete_child/',
            {'delete': True}, follow=True)
        self.assertRedirects(response,
            f'/accounts/profile/deleted_child/' )

    # --- profile release view (only allowed for adult children) ---
    def test_profile_release_view_not_logged(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        response = self.client.get(f'/accounts/profile/{child.id}/release/')
        self.assertEqual(response.status_code, 302 )

    def test_profile_release_view_not_logged_redirects(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        response = self.client.get(f'/accounts/profile/{child.id}/release/')
        self.assertRedirects(response,
            f'/accounts/login/?next=/accounts/profile/{child.id}/release/' )

    def test_profile_release_view_404_wrong_parent(self):
        parent = User.objects.get(username='user_2')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'user_2',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/release/')
        self.assertEqual(response.status_code, 404 )

    def test_profile_release_view_404_wrong_child(self):
        parent = User.objects.get(username='trustyparent')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/404/release/')
        self.assertEqual(response.status_code, 404 )

    def test_profile_release_view_404_not_adult(self):
        # trustychild is ~15, so release must be refused.
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='trustychild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/release/')
        self.assertEqual(response.status_code, 404 )

    def test_profile_release_view_status_code(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='adultchild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/release/')
        self.assertEqual(response.status_code, 200 )

    def test_profile_release_view_template(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='adultchild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.get(f'/accounts/profile/{child.id}/release/')
        self.assertTemplateUsed(response, 'users/profile_release.html' )

    def test_profile_release_view_post_status_code(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='adultchild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.post(f'/accounts/profile/{child.id}/release/',
            {'release': True})
        self.assertEqual(response.status_code, 302 )

    def test_profile_release_view_post_redirects(self):
        parent = User.objects.get(username='trustyparent')
        child = User.objects.get(username='adultchild')
        self.client.post('/accounts/login/', {'username':'trustyparent',
            'password':'P4s5W0r6'})
        response = self.client.post(f'/accounts/profile/{child.id}/release/',
            {'release': True}, follow=True)
        self.assertRedirects(response,
            f'/accounts/profile/released/' )
| 49.692068 | 104 | 0.663558 | 3,661 | 31,952 | 5.609669 | 0.03988 | 0.064274 | 0.066806 | 0.104981 | 0.943371 | 0.91771 | 0.888932 | 0.866095 | 0.852559 | 0.835419 | 0 | 0.025443 | 0.188157 | 31,952 | 642 | 105 | 49.76947 | 0.766268 | 0.012268 | 0 | 0.733456 | 0 | 0.005515 | 0.284845 | 0.120392 | 0 | 0 | 0 | 0 | 0.134191 | 1 | 0.136029 | false | 0.123162 | 0.007353 | 0 | 0.145221 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
55aa84b675faba80e3588e3150b679fe6d73d91e | 101 | py | Python | test/main.py | rethab/action-google-styleguide | dc91636d9e7d2ec794c7b7351e99511fc3c03a64 | [
"MIT"
] | null | null | null | test/main.py | rethab/action-google-styleguide | dc91636d9e7d2ec794c7b7351e99511fc3c03a64 | [
"MIT"
] | 13 | 2021-08-08T11:04:51.000Z | 2021-08-20T08:20:01.000Z | test/main.py | rethab/action-mypy-fancy | 7b00e4c99101639fa29f3ec693e5f9ec30e8b83a | [
"MIT"
] | null | null | null | def add(a: str, b: int) -> int:
return a + b
def add2(a: str, b: int) -> str:
    """Return ``a + b``.

    NOTE(review): like ``add`` above, the annotations contradict the body
    (``str + int`` raises TypeError); presumably a deliberate type-checker
    test fixture -- confirm before changing it.
    """
    return a + b
| 14.428571 | 32 | 0.50495 | 20 | 101 | 2.55 | 0.4 | 0.156863 | 0.196078 | 0.313725 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014493 | 0.316832 | 101 | 6 | 33 | 16.833333 | 0.724638 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
e984717de5b3f7350627543888b77f70544b9b9e | 177 | py | Python | scraper/domain/dish.py | boscar98/MMM-VUB-Resto | 2488dbfafb8dfce2868d78ab76352b876558168f | [
"MIT"
] | null | null | null | scraper/domain/dish.py | boscar98/MMM-VUB-Resto | 2488dbfafb8dfce2868d78ab76352b876558168f | [
"MIT"
] | 1 | 2022-02-23T16:07:49.000Z | 2022-02-23T16:07:49.000Z | scraper/domain/dish.py | OscarVsp/MMM-VUB-Resto | 2488dbfafb8dfce2868d78ab76352b876558168f | [
"MIT"
] | null | null | null | class Dish:
def __init__(self, name, type):
self.name = name
self.type = type
def __str__(self):
return '{}: {}'.format(self.type, self.name)
| 17.7 | 52 | 0.553672 | 22 | 177 | 4.090909 | 0.454545 | 0.266667 | 0.266667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.299435 | 177 | 9 | 53 | 19.666667 | 0.725806 | 0 | 0 | 0 | 0 | 0 | 0.034286 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.166667 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
75a5956adca823b0f17bf8d1ac55e5c466f62680 | 92 | py | Python | faster_rcnn/symbols/__init__.py | ucf-arcc/Faster_RCNN_for_DOTA | dba36aba6d3e5f694ded20cf4b15d5a1239b7bd6 | [
"Apache-2.0"
] | 344 | 2018-04-30T05:33:35.000Z | 2022-03-30T15:38:13.000Z | faster_rcnn/symbols/__init__.py | ucf-arcc/Faster_RCNN_for_DOTA | dba36aba6d3e5f694ded20cf4b15d5a1239b7bd6 | [
"Apache-2.0"
] | 39 | 2018-05-03T12:51:40.000Z | 2021-10-18T12:40:55.000Z | faster_rcnn/symbols/__init__.py | ucf-arcc/Faster_RCNN_for_DOTA | dba36aba6d3e5f694ded20cf4b15d5a1239b7bd6 | [
"Apache-2.0"
] | 113 | 2018-04-30T03:34:42.000Z | 2021-11-27T09:57:43.000Z | import resnet_v1_101_rcnn
import resnet_v1_101_rcnn_dcn
import resnet_v1_101_rcnn_quadrangle | 30.666667 | 36 | 0.945652 | 17 | 92 | 4.470588 | 0.411765 | 0.473684 | 0.552632 | 0.671053 | 0.828947 | 0 | 0 | 0 | 0 | 0 | 0 | 0.137931 | 0.054348 | 92 | 3 | 36 | 30.666667 | 0.735632 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
75b521554dd512562467c36dfda53111e01e1903 | 30,770 | py | Python | src/networks/mdrnns/gru.py | ccoay/ec-extraction | e5555ba010420b4cdf57492be98dce4707052a4b | [
"MIT"
] | null | null | null | src/networks/mdrnns/gru.py | ccoay/ec-extraction | e5555ba010420b4cdf57492be98dce4707052a4b | [
"MIT"
] | null | null | null | src/networks/mdrnns/gru.py | ccoay/ec-extraction | e5555ba010420b4cdf57492be98dce4707052a4b | [
"MIT"
] | null | null | null | import math
import copy
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
from tqdm import tqdm
from typing import *
import torch.jit as jit
### 2d ###
class GRU2dCell(jit.ScriptModule):
    """One step of a 2-D GRU.

    Combines an input vector with the cell's two predecessor states
    (one along each grid axis) into a new state. Wi maps the input to
    4*state_dim gate pre-activations; Ws maps the two concatenated
    predecessor states (2*state_dim) to the same 4*state_dim.
    """
    __constants__ = ['input_dim', 'state_dim']
    def __init__(self, input_dim, state_dim, dropout=0.):
        # NOTE(review): `dropout` is accepted but unused in this cell.
        super().__init__()
        self.input_dim = input_dim
        self.state_dim = state_dim
        self.Wi = nn.Linear(self.input_dim, self.state_dim*4)
        self.Ws = nn.Linear(self.state_dim*2, self.state_dim*4)
    @jit.script_method
    def forward(self, x, s_prev0, s_prev1):
        """Return the new state h, shape (batch, state_dim).

        Gate layout after chunk(4): r_inv (reset complement), i
        (update), n (candidate), l (lambda weighting the two
        predecessor states).
        """
        #s_prev0 = torch.zeros_like(s_prev0)
        #s_prev1 = torch.zeros_like(s_prev1)
        s = torch.cat([s_prev0, s_prev1], -1)
        igates = self.Wi(x)
        sgates = self.Ws(s)
        gates = igates + sgates
        # r_inv actual represents (1-r)
        r_inv, i, n, l = gates.chunk(4, 1)
        # s_n is the state contribution to the candidate gate (3rd chunk).
        s_n = sgates[:, self.state_dim*2:self.state_dim*3]
        l = l.sigmoid()
        r_inv = r_inv.sigmoid()
        i = i.sigmoid()
        n = (n - r_inv*s_n).tanh() # <==> (i_n + r * s_n)
        # h = (1-i)*n + i*mix, where mix = l*s_prev0 + (1-l)*s_prev1.
        h = n + i * (l*s_prev0 + (1.-l)*s_prev1 - n)
        return h
### 2d, layer-normalized ###
class LNGRU2dCell(jit.ScriptModule):
    """Layer-normalized variant of GRU2dCell.

    Same gate math as GRU2dCell, but LayerNorm + dropout are applied to
    the input/state gate pre-activations and to the output state, and
    the linear layers carry no bias (LayerNorm provides the shift).
    """
    __constants__ = ['input_dim', 'state_dim']
    def __init__(self, input_dim, state_dim, dropout=0.):
        super().__init__()
        self.input_dim = input_dim
        self.state_dim = state_dim
        # NOTE(review): bias=None works because None is falsy, but the
        # conventional spelling is bias=False.
        self.Wi = nn.Linear(self.input_dim, self.state_dim * 4, bias=None)
        self.Ws = nn.Linear(self.state_dim * 2, self.state_dim * 4, bias=None)
        self.LNi = nn.LayerNorm(self.state_dim * 4)
        self.LNs = nn.LayerNorm(self.state_dim * 4)
        self.LNh = nn.LayerNorm(self.state_dim)
        self.dropout_layer = nn.Dropout(dropout, inplace=True)
    @jit.script_method
    def forward(self, x, s_prev0, s_prev1):
        """Return the new (normalized, dropped-out) state h."""
        s = torch.cat([s_prev0, s_prev1], -1)
        igates = self.dropout_layer(self.LNi(self.Wi(x)))
        sgates = self.dropout_layer(self.LNs(self.Ws(s)))
        gates = igates + sgates
        # r_inv actual represents (1-r)
        r_inv, i, n, l = gates.chunk(4, 1)
        # State contribution to the candidate gate (3rd chunk of sgates).
        s_n = sgates[:, self.state_dim*2:self.state_dim*3]
        l = l.sigmoid()
        r_inv = r_inv.sigmoid()
        i = i.sigmoid()
        n = (n - r_inv*s_n).tanh() # <==> (i_n + r * s_n)
        h = n + i * (l*s_prev0 + (1.-l)*s_prev1 - n)
        h = self.dropout_layer(self.LNh(h))
        return h
### 3d ###
class GRU3dCell(jit.ScriptModule):
    """GRU cell with three predecessor states.

    Gates are produced in 6*state_dim: the first half chunks into
    (r_inv, i, n), the second half is a 3-way lambda that is
    softmax-normalized over the three predecessor states.
    """
    __constants__ = ['input_dim', 'state_dim']
    def __init__(self, input_dim, state_dim, dropout=0.):
        # NOTE(review): `dropout` is accepted but unused in this cell.
        super().__init__()
        self.input_dim = input_dim
        self.state_dim = state_dim
        self.Wi = nn.Linear(self.input_dim, self.state_dim * 6)
        self.Ws = nn.Linear(self.state_dim * 3, self.state_dim * 6)
    @jit.script_method
    def forward(self, x, s_prev0, s_prev1, s_prev2):
        """Return the new state h, shape (batch, state_dim)."""
        #s_prev2 = torch.zeros_like(s_prev2)
        #s_prev0 = torch.zeros_like(s_prev0)
        #s_prev1 = torch.zeros_like(s_prev1)
        s = torch.cat([s_prev0, s_prev1, s_prev2], -1)
        igates = self.Wi(x)
        sgates = self.Ws(s)
        gates = igates + sgates
        # r_inv actual represents (1-r)
        r_i_n, l = gates.chunk(2, 1)
        r_inv, i, n = r_i_n.chunk(3, 1)
        # State contribution to the candidate gate (3rd state_dim slice).
        s_n = sgates[:, self.state_dim*2:self.state_dim*3]
        l = l.view(-1, 3, self.state_dim).softmax(1) # weights for 3 hidden states
        r_inv = r_inv.sigmoid()
        i = i.sigmoid()
        n = (n - r_inv*s_n).tanh() # <==> (i_n + r * s_n)
        # h = (1-i)*n + i*(softmax-weighted mix of the 3 predecessors).
        h = n + i * ( (l*s.view(-1,3,self.state_dim)).sum(1) - n)
        return h
class GRU25dCell(GRU3dCell):
    """Alias of GRU3dCell: the 2.5-D layers reuse the 3-state cell unchanged."""
    pass
class LNGRU3dCell(jit.ScriptModule):
    """Layer-normalized variant of GRU3dCell.

    Same three-predecessor gate math, with LayerNorm + dropout on the
    gate pre-activations and the output state; linear layers without
    bias (LayerNorm provides the shift).
    """
    __constants__ = ['input_dim', 'state_dim']
    def __init__(self, input_dim, state_dim, dropout=0.):
        super().__init__()
        self.input_dim = input_dim
        self.state_dim = state_dim
        # NOTE(review): bias=None works because None is falsy, but the
        # conventional spelling is bias=False.
        self.Wi = nn.Linear(self.input_dim, self.state_dim * 6, bias=None)
        self.Ws = nn.Linear(self.state_dim * 3, self.state_dim * 6, bias=None)
        self.LNi = nn.LayerNorm(self.state_dim * 6)
        self.LNs = nn.LayerNorm(self.state_dim * 6)
        self.LNh = nn.LayerNorm(self.state_dim)
        self.dropout_layer = nn.Dropout(dropout, inplace=True)
    @jit.script_method
    def forward(self, x, s_prev0, s_prev1, s_prev2):
        """Return the new (normalized, dropped-out) state h."""
        s = torch.cat([s_prev0, s_prev1, s_prev2], -1)
        igates = self.dropout_layer(self.LNi(self.Wi(x)))
        sgates = self.dropout_layer(self.LNs(self.Ws(s)))
        gates = igates + sgates
        # r_inv actual represents (1-r)
        r_i_n, l = gates.chunk(2, 1)
        r_inv, i, n = r_i_n.chunk(3, 1)
        s_n = sgates[:, self.state_dim*2:self.state_dim*3]
        l = l.view(-1, 3, self.state_dim).softmax(1) # weights for 3 hidden states
        r_inv = r_inv.sigmoid()
        i = i.sigmoid()
        n = (n - r_inv*s_n).tanh() # <==> (i_n + r * s_n)
        h = n + i * ( (l*s.view(-1,3,self.state_dim)).sum(1) - n)
        h = self.dropout_layer(self.LNh(h))
        return h
class LNGRU25dCell(LNGRU3dCell):
    """Alias of LNGRU3dCell: the 2.5-D layers reuse the 3-state cell unchanged."""
    pass
############# Layer ###############
class GRU2dLayer(jit.ScriptModule):
    """Uni-directional 2-D GRU layer.

    Scans a (T0, T1) grid of inputs anti-diagonal by anti-diagonal, so
    that every cell on one diagonal (each depends only on the previous
    diagonal) can be computed in a single batched cell call.
    """
    __constants__ = ['emb_dim', 'hidden_dim']
    def __init__(self, config, emb_dim=None, _Cell=LNGRU2dCell):
        # BUGFIX: the default cell used to be LNGRU3dCell, whose
        # forward() takes three predecessor states, but this layer
        # always invokes the cell with two (see self.cell(...) below),
        # so constructing the layer with the old default crashed at
        # runtime. LNGRU2dCell matches the two-state call signature and
        # mirrors BGRU2dLayer's default.
        super().__init__()
        self.config = config
        if emb_dim is None:
            self.emb_dim = config.hidden_dim
        else:
            self.emb_dim = emb_dim
        self.hidden_dim = config.hidden_dim
        self.cell = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
    @jit.script_method
    def forward(self, x: torch.Tensor, states: Optional[torch.Tensor], masks: torch.Tensor):
        """Run the 2-D scan.

        x: (B, T0, T1, E) inputs; masks: (B, T0, T1) validity mask;
        states: optional pre-filled (T0+1, T1+1, B, H) state grid.
        Returns (states_s, states): per-position outputs (B, T0, T1, H)
        and the padded raw state grid (T0+1, T1+1, B, H).
        """
        # x (B, T0, T1, H)
        B, T0, T1, E = x.shape
        H = self.hidden_dim
        x = x.permute(1, 2, 0, 3) # (T0, T1, B, E)
        # Flip axis 1 so anti-diagonals of the grid become tensor diagonals.
        x = x.flip(1)
        masks = masks.permute(1, 2, 0).unsqueeze(-1).float()
        masks = masks.flip(1)
        if states is None:
            states = torch.zeros(T0+1, T1+1, B, H, device=x.device) # (T0+1, T1+1*, B, H)
        for offset in range(T1-1, -T0, -1):
            x_current = x.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            diag_len = x_current.size(0)
            new_batch_size = diag_len * B
            x_current = x_current.view(new_batch_size, E)
            # calculate previous hidden & cell states for this diagonal
            s_current = states.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            s_prev0 = s_current[-diag_len:].view(new_batch_size, H)
            s_prev1 = s_current[:diag_len].view(new_batch_size, H)
            # run batched computation for this diagonal
            s_next = self.cell(
                x_current, s_prev0, s_prev1)
            # separate batch and diag_len again so we can store them accordingly
            to_save = s_next.view(diag_len, B, H)
            to_save = to_save * masks.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next = states.diagonal(offset=offset-1, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next[-diag_len-1:diag_len+1] = to_save
        states_s = states[1:, :-1].permute(2, 0, 1, 3) # B, T0 T1 H
        # Undo the earlier flip so outputs align with the input layout.
        states_s = states_s.flip(2)
        return states_s, states # (B, T, H), and (T0+1, T1+1*, B, H)
class BGRU2dLayer(jit.ScriptModule):
    """Bi-directional 2-D GRU layer.

    Runs two cells over the same diagonal scan: the forward cell sees x
    with axis 1 flipped, the backward cell sees x with axis 0 flipped.
    Each cell gets hidden_dim = config.hidden_dim // 2, and their
    outputs are concatenated back to config.hidden_dim.
    """
    __constants__ = ['emb_dim', 'hidden_dim']
    def __init__(self, config, emb_dim=None, _Cell=LNGRU2dCell):
        super().__init__()
        self.config = config
        if emb_dim is None:
            self.emb_dim = config.hidden_dim
        else:
            self.emb_dim = emb_dim
        self.hidden_dim = config.hidden_dim // 2
        self.cellf = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
        self.cellb = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
    @jit.script_method
    def forward(self,
                x: torch.Tensor,
                states: Optional[torch.Tensor],
                masks: torch.Tensor):
        """Scan the grid diagonal-wise in both directions.

        x: (B, T0, T1, E); masks: (B, T0, T1). `states` must be None
        (no warm-start supported here). Returns per-position outputs
        (B, T0, T1, H*2) and the padded state grid (T0+1, T1+1, B, H*2).
        """
        assert states is None
        # x (B, T0, T1, H)
        B, T0, T1, E = x.shape
        H = self.hidden_dim
        x = x.permute(1, 2, 0, 3) # (T0, T1, B, E)
        # Forward direction flips axis 1, backward flips axis 0.
        x = torch.cat([x.flip(1), x.flip(0)], -1)
        masks = masks.permute(1, 2, 0).unsqueeze(-1).float().repeat(1, 1, 1, H) # (T0, T1, B, H)
        masks = torch.cat([masks.flip(1), masks.flip(0)], -1)
        states = torch.zeros(T0+1, T1+1, B, H*2, device=x.device) # (T0+1, T1+1*, B, H)
        for offset in range(T1-1, -T0, -1):
            x_current = x.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            diag_len = x_current.size(0)
            new_batch_size = diag_len * B
            x_current = x_current.view(new_batch_size, E*2)
            x_current_f, x_current_b = x_current.chunk(2, -1)
            # calculate previous hidden & cell states for this diagonal
            s_current = states.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            s_prev_f0, s_prev_b0 = s_current[-diag_len:].view(new_batch_size, H*2).chunk(2, 1)
            s_prev_f1, s_prev_b1 = s_current[:diag_len].view(new_batch_size, H*2).chunk(2, 1)
            # run batched computation for this diagonal
            s_next_f = self.cellf(
                x_current_f, s_prev_f0, s_prev_f1)
            s_next_b = self.cellb(
                x_current_b, s_prev_b0, s_prev_b1)
            # separate batch and diag_len again so we can store them accordingly
            to_save = torch.cat([s_next_f, s_next_b], -1).view(diag_len, B, H*2)
            to_save = to_save * masks.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next = states.diagonal(offset=offset-1, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next[-diag_len-1:diag_len+1] = to_save
        states_s = states[1:, :-1].permute(2, 0, 1, 3) # B, T0 T1 H*2
        # Undo the per-direction flips so both halves align with the input.
        tmp0, tmp1 = states_s.chunk(2, -1)
        states_s = torch.cat([tmp0.flip(2), tmp1.flip(1)], -1)
        return states_s, states # (B, T, H), and (T0+1, T1+1*, B, H)
class BsGRU2dLayer(jit.ScriptModule):
    """Bi-directional 2-D GRU layer, symmetric-direction variant.

    Like BGRU2dLayer but the two directions are the identity orientation
    and the fully reversed one (both axes flipped), rather than the two
    single-axis flips.
    """
    __constants__ = ['emb_dim', 'hidden_dim']
    def __init__(self, config, emb_dim=None, _Cell=LNGRU2dCell):
        super().__init__()
        self.config = config
        if emb_dim is None:
            self.emb_dim = config.hidden_dim
        else:
            self.emb_dim = emb_dim
        self.hidden_dim = config.hidden_dim // 2
        self.cellf = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
        self.cellb = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
    @jit.script_method
    def forward(self,
                x: torch.Tensor,
                states: Optional[torch.Tensor],
                masks: torch.Tensor):
        """Scan the grid diagonal-wise in two opposite orientations.

        x: (B, T0, T1, E); masks: (B, T0, T1). `states` must be None.
        Returns per-position outputs (B, T0, T1, H*2) and the padded
        state grid (T0+1, T1+1, B, H*2).
        """
        assert states is None
        # x (B, T0, T1, H)
        B, T0, T1, E = x.shape
        H = self.hidden_dim
        x = x.permute(1, 2, 0, 3) # (T0, T1, B, E)
        # Forward direction is unflipped; backward flips both axes.
        x = torch.cat([x, x.flip(0).flip(1)], -1)
        masks = masks.permute(1, 2, 0).unsqueeze(-1).float().repeat(1, 1, 1, H) # (T0, T1, B, H)
        masks = torch.cat([masks, masks.flip(0).flip(1)], -1)
        states = torch.zeros(T0+1, T1+1, B, H*2, device=x.device) # (T0+1, T1+1*, B, H)
        for offset in range(T1-1, -T0, -1):
            x_current = x.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            diag_len = x_current.size(0)
            new_batch_size = diag_len * B
            x_current = x_current.view(new_batch_size, E*2)
            x_current_f, x_current_b = x_current.chunk(2, -1)
            # calculate previous hidden & cell states for this diagonal
            s_current = states.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            s_prev_f0, s_prev_b0 = s_current[-diag_len:].view(new_batch_size, H*2).chunk(2, 1)
            s_prev_f1, s_prev_b1 = s_current[:diag_len].view(new_batch_size, H*2).chunk(2, 1)
            # run batched computation for this diagonal
            s_next_f = self.cellf(
                x_current_f, s_prev_f0, s_prev_f1)
            s_next_b = self.cellb(
                x_current_b, s_prev_b0, s_prev_b1)
            # separate batch and diag_len again so we can store them accordingly
            to_save = torch.cat([s_next_f, s_next_b], -1).view(diag_len, B, H*2)
            to_save = to_save * masks.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next = states.diagonal(offset=offset-1, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next[-diag_len-1:diag_len+1] = to_save
        states_s = states[1:, :-1].permute(2, 0, 1, 3) # B, T0 T1 H*2
        # Undo the backward direction's double flip before concatenating.
        tmp0, tmp1 = states_s.chunk(2, -1)
        states_s = torch.cat([tmp0, tmp1.flip(1).flip(2)], -1)
        return states_s, states # (B, T, H), and (T0+1, T1+1*, B, H)
class QGRU2dLayer(jit.ScriptModule):
    """Quad-directional 2-D GRU layer.

    Runs four cells (a..d) over the diagonal scan, one per grid
    orientation: flip(1), flip(0), identity, and flip(1).flip(0).
    Each cell gets hidden_dim = configs.hidden_dim // 4; outputs are
    concatenated back to configs.hidden_dim.

    NOTE(review): the parameter is named `configs` here while sibling
    layers use `config` — kept as-is since it is part of the keyword
    interface.
    """
    __constants__ = ['emb_dim', 'hidden_dim']
    def __init__(self, configs, emb_dim=None, _Cell=LNGRU2dCell):
        super().__init__()
        self.configs = configs
        if emb_dim is None:
            self.emb_dim = configs.hidden_dim
        else:
            self.emb_dim = emb_dim
        self.hidden_dim = configs.hidden_dim // 4
        self.cella = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
        self.cellb = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
        self.cellc = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
        self.celld = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
    @jit.script_method
    def forward(self,
                x: torch.Tensor,
                states: Optional[torch.Tensor],
                masks: torch.Tensor):
        """Scan the grid diagonal-wise in all four orientations.

        x: (B, T0, T1, E); masks: (B, T0, T1). `states` must be None.
        Returns per-position outputs (B, T0, T1, H*4) and the padded
        state grid (T0+1, T1+1, B, H*4).
        """
        assert states is None
        # x (B, T0, T1, H)
        B, T0, T1, E = x.shape
        H = self.hidden_dim
        x = x.permute(1, 2, 0, 3) # (T0, T1, B, E)
        x = torch.cat([x.flip(1), x.flip(0), x, x.flip(1).flip(0)], -1)
        masks = masks.permute(1, 2, 0).unsqueeze(-1).float().repeat(1, 1, 1, H) # (T0, T1, B, H)
        masks = torch.cat([masks.flip(1), masks.flip(0), masks, masks.flip(1).flip(0)], -1)
        states = torch.zeros(T0+1, T1+1, B, H*4, device=x.device) # (T0+1, T1+1*, B, H)
        for offset in range(T1-1, -T0, -1):
            x_current = x.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            diag_len = x_current.size(0)
            new_batch_size = diag_len * B
            x_current = x_current.view(new_batch_size, E*4)
            x_current_a, x_current_b, x_current_c, x_current_d = x_current.chunk(4, -1)
            # calculate previous hidden & cell states for this diagonal
            s_current = states.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            s_prev_a0, s_prev_b0, s_prev_c0, s_prev_d0 = s_current[-diag_len:].view(new_batch_size, H*4).chunk(4, 1)
            s_prev_a1, s_prev_b1, s_prev_c1, s_prev_d1 = s_current[:diag_len].view(new_batch_size, H*4).chunk(4, 1)
            # run batched computation for this diagonal
            s_next_a = self.cella(
                x_current_a, s_prev_a0, s_prev_a1)
            s_next_b = self.cellb(
                x_current_b, s_prev_b0, s_prev_b1)
            s_next_c = self.cellc(
                x_current_c, s_prev_c0, s_prev_c1)
            s_next_d = self.celld(
                x_current_d, s_prev_d0, s_prev_d1)
            # separate batch and diag_len again so we can store them accordingly
            to_save = torch.cat([s_next_a, s_next_b, s_next_c, s_next_d], -1).view(diag_len, B, H*4)
            to_save = to_save * masks.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next = states.diagonal(offset=offset-1, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next[-diag_len-1:diag_len+1] = to_save
        states_s = states[1:, :-1].permute(2, 0, 1, 3) # B, T0 T1 H*4
        # Undo each direction's flips so all four halves align with the input.
        tmp0, tmp1, tmp2, tmp3 = states_s.chunk(4, -1)
        states_s = torch.cat([tmp0.flip(2), tmp1.flip(1), tmp2, tmp3.flip(1).flip(2)], -1)
        return states_s, states # (B, T, H), and (T0+1, T1+1*, B, H)
class GRU25dLayer(jit.ScriptModule):
    """Uni-directional 2.5-D GRU layer.

    Like GRU2dLayer, but the cell receives a third predecessor state
    (s_prev2) read from the *incoming* state grid `states_in` —
    presumably the output of a previous layer or pass (confirm against
    callers) — hence the 3-state default cell LNGRU3dCell.
    """
    __constants__ = ['emb_dim', 'hidden_dim']
    def __init__(self, config, emb_dim=None, _Cell=LNGRU3dCell):
        super().__init__()
        self.config = config
        if emb_dim is None:
            self.emb_dim = config.hidden_dim
        else:
            self.emb_dim = emb_dim
        self.hidden_dim = config.hidden_dim
        self.cell = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
    @jit.script_method
    def forward(self, x: torch.Tensor, states: Optional[torch.Tensor], masks: torch.Tensor):
        """Run the 2.5-D scan.

        x: (B, T0, T1, E); masks: (B, T0, T1); states: optional incoming
        (T0+1, T1+1, B, H) grid providing the third predecessor (zeros
        when None). Returns per-position outputs (B, T0, T1, H) and the
        newly written state grid (T0+1, T1+1, B, H).
        """
        # x (B, T0, T1, H)
        B, T0, T1, E = x.shape
        H = self.hidden_dim
        x = x.permute(1, 2, 0, 3) # (T0, T1, B, E)
        x = x.flip(1)
        masks = masks.permute(1, 2, 0).unsqueeze(-1).float()
        masks = masks.flip(1)
        if states is None:
            states_in = torch.zeros(T0+1, T1+1, B, H, device=x.device) # (T0+1, T1+1*, B, H)
            states = states_in.clone()
        else:
            states_in = states
            states = torch.zeros(T0+1, T1+1, B, H, device=x.device)
        for offset in range(T1-1, -T0, -1):
            x_current = x.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            diag_len = x_current.size(0)
            new_batch_size = diag_len * B
            x_current = x_current.view(new_batch_size, E)
            # calculate previous hidden & cell states for this diagonal
            s_current = states.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            # Third predecessor comes from the incoming grid at the *next* diagonal.
            s_next = states_in.diagonal(offset=offset-1, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            s_prev0 = s_current[-diag_len:].view(new_batch_size, H)
            s_prev1 = s_current[:diag_len].view(new_batch_size, H)
            s_prev2 = s_next[-diag_len-1:diag_len+1].view(new_batch_size, H)
            # run batched computation for this diagonal
            s_next = self.cell(
                x_current, s_prev0, s_prev1, s_prev2)
            # separate batch and diag_len again so we can store them accordingly
            to_save = s_next.view(diag_len, B, H)
            to_save = to_save * masks.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next = states.diagonal(offset=offset-1, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next[-diag_len-1:diag_len+1] = to_save
        states_s = states[1:, :-1].permute(2, 0, 1, 3) # B, T0 T1 H
        states_s = states_s.flip(2)
        return states_s, states # (B, T, H), and (T0+1, T1+1*, B, H)
class BGRU25dLayer(jit.ScriptModule):
    """Bi-directional 2.5-D GRU layer.

    Two 3-state cells (forward: flip(1); backward: flip(0)), each fed a
    third predecessor from the incoming state grid `states_in`.
    hidden_dim = config.hidden_dim // 2 per direction.
    """
    __constants__ = ['emb_dim', 'hidden_dim']
    def __init__(self, config, emb_dim=None, _Cell=LNGRU3dCell):
        super().__init__()
        self.config = config
        if emb_dim is None:
            self.emb_dim = config.hidden_dim
        else:
            self.emb_dim = emb_dim
        self.hidden_dim = config.hidden_dim // 2
        self.cellf = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
        self.cellb = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
    @jit.script_method
    def forward(self,
                x: torch.Tensor,
                states: Optional[torch.Tensor],
                masks: torch.Tensor):
        """Run the bi-directional 2.5-D scan.

        x: (B, T0, T1, E); masks: (B, T0, T1); states: optional incoming
        (T0+1, T1+1, B, H*2) grid (zeros when None). Returns outputs
        (B, T0, T1, H*2) and the new state grid (T0+1, T1+1, B, H*2).
        """
        # x (B, T0, T1, H)
        B, T0, T1, E = x.shape
        H = self.hidden_dim
        x = x.permute(1, 2, 0, 3) # (T0, T1, B, E)
        x = torch.cat([x.flip(1), x.flip(0)], -1) # (T0, T1, B, E*2)
        masks = masks.permute(1, 2, 0).unsqueeze(-1).float().repeat(1, 1, 1, H) # (T0, T1, B, H)
        masks = torch.cat([masks.flip(1), masks.flip(0)], -1) # (T0, T1, B, H*2)
        if states is None:
            states_in = torch.zeros(T0+1, T1+1, B, H*2, device=x.device) # (T0+1, T1+1*, B, H)
            states = states_in.clone()
        else:
            states_in = states
            states = torch.zeros(T0+1, T1+1, B, H*2, device=x.device)
        for offset in range(T1-1, -T0, -1):
            x_current = x.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            diag_len = x_current.size(0)
            new_batch_size = diag_len * B
            x_current = x_current.view(new_batch_size, E*2)
            x_current_f, x_current_b = x_current.chunk(2, -1)
            # calculate previous hidden & cell states for this diagonal
            s_current = states.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            # Third predecessor comes from the incoming grid at the *next* diagonal.
            s_next = states_in.diagonal(offset=offset-1, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            s_prev_f0, s_prev_b0 = s_current[-diag_len:].view(new_batch_size, H*2).chunk(2, 1)
            s_prev_f1, s_prev_b1 = s_current[:diag_len].view(new_batch_size, H*2).chunk(2, 1)
            s_prev_f2, s_prev_b2 = s_next[-diag_len-1:diag_len+1].view(new_batch_size, H*2).chunk(2, 1)
            # run batched computation for this diagonal
            s_next_f = self.cellf(
                x_current_f, s_prev_f0, s_prev_f1, s_prev_f2)
            s_next_b = self.cellb(
                x_current_b, s_prev_b0, s_prev_b1, s_prev_b2)
            # separate batch and diag_len again so we can store them accordingly
            to_save = torch.cat([s_next_f, s_next_b], -1).view(diag_len, B, H*2)
            to_save = to_save * masks.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next = states.diagonal(offset=offset-1, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next[-diag_len-1:diag_len+1] = to_save
        states_s = states[1:, :-1].permute(2, 0, 1, 3) # B, T0 T1 H*2
        tmp0, tmp1 = states_s.chunk(2, -1)
        states_s = torch.cat([tmp0.flip(2), tmp1.flip(1)], -1)
        return states_s, states # (B, T, H), and (T0+1, T1+1*, B, H)
class BsGRU25dLayer(jit.ScriptModule):
    """Bi-directional 2.5-D GRU layer, symmetric-direction variant.

    Like BGRU25dLayer but the two directions are identity and
    flip(0).flip(1) (fully reversed), mirroring BsGRU2dLayer.
    """
    __constants__ = ['emb_dim', 'hidden_dim']
    def __init__(self, config, emb_dim=None, _Cell=LNGRU3dCell):
        super().__init__()
        self.config = config
        if emb_dim is None:
            self.emb_dim = config.hidden_dim
        else:
            self.emb_dim = emb_dim
        self.hidden_dim = config.hidden_dim // 2
        self.cellf = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
        self.cellb = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
    @jit.script_method
    def forward(self,
                x: torch.Tensor,
                states: Optional[torch.Tensor],
                masks: torch.Tensor):
        """Run the symmetric bi-directional 2.5-D scan.

        x: (B, T0, T1, E); masks: (B, T0, T1); states: optional incoming
        (T0+1, T1+1, B, H*2) grid (zeros when None). Returns outputs
        (B, T0, T1, H*2) and the new state grid (T0+1, T1+1, B, H*2).
        """
        # x (B, T0, T1, H)
        B, T0, T1, E = x.shape
        H = self.hidden_dim
        x = x.permute(1, 2, 0, 3) # (T0, T1, B, E)
        x = torch.cat([x, x.flip(0).flip(1)], -1) # (T0, T1, B, E*2)
        masks = masks.permute(1, 2, 0).unsqueeze(-1).float().repeat(1, 1, 1, H) # (T0, T1, B, H)
        masks = torch.cat([masks, masks.flip(0).flip(1)], -1) # (T0, T1, B, H*2)
        if states is None:
            states_in = torch.zeros(T0+1, T1+1, B, H*2, device=x.device) # (T0+1, T1+1*, B, H)
            states = states_in.clone()
        else:
            states_in = states
            states = torch.zeros(T0+1, T1+1, B, H*2, device=x.device)
        for offset in range(T1-1, -T0, -1):
            x_current = x.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            diag_len = x_current.size(0)
            new_batch_size = diag_len * B
            x_current = x_current.view(new_batch_size, E*2)
            x_current_f, x_current_b = x_current.chunk(2, -1)
            # calculate previous hidden & cell states for this diagonal
            s_current = states.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            # Third predecessor comes from the incoming grid at the *next* diagonal.
            s_next = states_in.diagonal(offset=offset-1, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            s_prev_f0, s_prev_b0 = s_current[-diag_len:].view(new_batch_size, H*2).chunk(2, 1)
            s_prev_f1, s_prev_b1 = s_current[:diag_len].view(new_batch_size, H*2).chunk(2, 1)
            s_prev_f2, s_prev_b2 = s_next[-diag_len-1:diag_len+1].view(new_batch_size, H*2).chunk(2, 1)
            # run batched computation for this diagonal
            s_next_f = self.cellf(
                x_current_f, s_prev_f0, s_prev_f1, s_prev_f2)
            s_next_b = self.cellb(
                x_current_b, s_prev_b0, s_prev_b1, s_prev_b2)
            # separate batch and diag_len again so we can store them accordingly
            to_save = torch.cat([s_next_f, s_next_b], -1).view(diag_len, B, H*2)
            to_save = to_save * masks.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next = states.diagonal(offset=offset-1, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next[-diag_len-1:diag_len+1] = to_save
        states_s = states[1:, :-1].permute(2, 0, 1, 3) # B, T0 T1 H*2
        tmp0, tmp1 = states_s.chunk(2, -1)
        states_s = torch.cat([tmp0, tmp1.flip(1).flip(2)], -1)
        return states_s, states # (B, T, H), and (T0+1, T1+1*, B, H)
class QGRU25dLayer(jit.ScriptModule):
    """Quad-directional 2.5-D GRU layer.

    Four 3-state cells (a..d), one per grid orientation: flip(1),
    flip(0), identity, flip(0).flip(1). Each cell also receives a third
    predecessor from the incoming state grid `states_in`.
    hidden_dim = config.hidden_dim // 4 per direction.
    """
    __constants__ = ['emb_dim', 'hidden_dim']
    def __init__(self, config, emb_dim=None, _Cell=LNGRU3dCell):
        super().__init__()
        self.config = config
        if emb_dim is None:
            self.emb_dim = config.hidden_dim
        else:
            self.emb_dim = emb_dim
        self.hidden_dim = config.hidden_dim // 4
        self.cella = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
        self.cellb = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
        self.cellc = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
        self.celld = _Cell(self.emb_dim, self.hidden_dim, dropout=0.0)
    @jit.script_method
    def forward(self,
                x: torch.Tensor,
                states: Optional[torch.Tensor],
                masks: torch.Tensor):
        """Run the quad-directional 2.5-D scan.

        x: (B, T0, T1, E); masks: (B, T0, T1); states: optional incoming
        (T0+1, T1+1, B, H*4) grid (zeros when None). Returns outputs
        (B, T0, T1, H*4) and the new state grid (T0+1, T1+1, B, H*4).
        """
        # x (B, T0, T1, H)
        B, T0, T1, E = x.shape
        H = self.hidden_dim
        x = x.permute(1, 2, 0, 3) # (T0, T1, B, E)
        x = torch.cat([x.flip(1), x.flip(0), x, x.flip(0).flip(1)], -1) # (T0, T1, B, E*4)
        masks = masks.permute(1, 2, 0).unsqueeze(-1).float().repeat(1, 1, 1, H) # (T0, T1, B, H)
        masks = torch.cat([masks.flip(1), masks.flip(0), masks, masks.flip(0).flip(1)], -1) # (T0, T1, B, H*4)
        if states is None:
            states_in = torch.zeros(T0+1, T1+1, B, H*4, device=x.device) # (T0+1, T1+1*, B, H)
            states = states_in.clone()
        else:
            states_in = states
            states = torch.zeros(T0+1, T1+1, B, H*4, device=x.device)
        for offset in range(T1-1, -T0, -1):
            x_current = x.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            diag_len = x_current.size(0)
            new_batch_size = diag_len * B
            x_current = x_current.view(new_batch_size, E*4)
            x_current_a, x_current_b, x_current_c, x_current_d = x_current.chunk(4, -1)
            # calculate previous hidden & cell states for this diagonal
            s_current = states.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            # Third predecessor comes from the incoming grid at the *next* diagonal.
            s_next = states_in.diagonal(offset=offset-1, dim1=0, dim2=1).permute(-1, 0, 1).contiguous()
            s_prev_a0, s_prev_b0, s_prev_c0, s_prev_d0 = s_current[-diag_len:].view(new_batch_size, H*4).chunk(4, 1)
            s_prev_a1, s_prev_b1, s_prev_c1, s_prev_d1 = s_current[:diag_len].view(new_batch_size, H*4).chunk(4, 1)
            s_prev_a2, s_prev_b2, s_prev_c2, s_prev_d2 = s_next[-diag_len-1:diag_len+1].view(new_batch_size, H*4).chunk(4, 1)
            # run batched computation for this diagonal
            s_next_a = self.cella(
                x_current_a, s_prev_a0, s_prev_a1, s_prev_a2)
            s_next_b = self.cellb(
                x_current_b, s_prev_b0, s_prev_b1, s_prev_b2)
            s_next_c = self.cellc(
                x_current_c, s_prev_c0, s_prev_c1, s_prev_c2)
            s_next_d = self.celld(
                x_current_d, s_prev_d0, s_prev_d1, s_prev_d2)
            # separate batch and diag_len again so we can store them accordingly
            to_save = torch.cat([s_next_a, s_next_b, s_next_c, s_next_d], -1).view(diag_len, B, H*4)
            to_save = to_save * masks.diagonal(offset=offset, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next = states.diagonal(offset=offset-1, dim1=0, dim2=1).permute(-1, 0, 1)
            s_next[-diag_len-1:diag_len+1] = to_save
        states_s = states[1:, :-1].permute(2, 0, 1, 3) # B, T0 T1 H*4
        # Undo each direction's flips so all four quarters align with the input.
        tmp0, tmp1, tmp2, tmp3 = states_s.chunk(4, -1)
        states_s = torch.cat([tmp0.flip(2), tmp1.flip(1), tmp2, tmp3.flip(1).flip(2)], -1)
        #states_s = torch.cat([tmp0.flip(2), tmp1.flip(1), tmp0.flip(2), tmp1.flip(1)], -1)
        return states_s, states # (B, T, H), and (T0+1, T1+1*, B, H)
| 39.347826 | 125 | 0.547416 | 4,682 | 30,770 | 3.36651 | 0.039299 | 0.025377 | 0.045679 | 0.02284 | 0.962949 | 0.962314 | 0.961934 | 0.957873 | 0.95394 | 0.948103 | 0 | 0.054474 | 0.317485 | 30,770 | 782 | 126 | 39.347826 | 0.696062 | 0.093045 | 0 | 0.884692 | 0 | 0 | 0.00749 | 0 | 0 | 0 | 0 | 0 | 0.005964 | 1 | 0.047714 | false | 0.003976 | 0.021869 | 0 | 0.145129 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f946161b8a4f10fa14e135b6913bb4d786935f09 | 7,369 | py | Python | openprocurement/auctions/core/managers.py | EBRD-ProzorroSale/openprocurement.auctions.core | 52bd59f193f25e4997612fca0f87291decf06966 | [
"Apache-2.0"
] | 2 | 2016-09-15T20:17:43.000Z | 2017-01-08T03:32:43.000Z | openprocurement/auctions/core/managers.py | EBRD-ProzorroSale/openprocurement.auctions.core | 52bd59f193f25e4997612fca0f87291decf06966 | [
"Apache-2.0"
] | 183 | 2017-12-21T11:04:37.000Z | 2019-03-27T08:14:34.000Z | openprocurement/auctions/core/managers.py | EBRD-ProzorroSale/openprocurement.auctions.core | 52bd59f193f25e4997612fca0f87291decf06966 | [
"Apache-2.0"
] | 12 | 2016-09-05T12:07:48.000Z | 2019-02-26T09:24:17.000Z | # -*- coding: utf-8 -*-
from zope.interface import implementer
from openprocurement.auctions.core.utils import (
save_auction
)
from openprocurement.auctions.core.interfaces import (
IManager
)
@implementer(IManager)
class AuctionManager(object):
    """Facade driving auction operations via strategy attributes.

    The factory attributes used below (auctioneer, awarding, canceller,
    changion_manager, creation_manager, representation_manager) are not
    defined on this class — presumably supplied by subclasses or
    configuration; confirm against the concrete managers.
    """
    def __init__(self, request, context):
        # `changed` / `saved` track whether an operation mutated the
        # auction and whether it has been persisted.
        self.request = request
        self.context = context
        self.changed = False
        self.saved = False
    def award(self):
        """Run the auctioneer and, if it awards, start the awarding flow."""
        auctioner = self.auctioneer(self.request, self.context)
        if auctioner.award():
            # NOTE(review): argument order (context, request) differs from
            # every other factory call in this class — confirm intentional.
            awarding = self.awarding(self.context, self.request)
            awarding.start_awarding()
    def cancel(self):
        """Cancel the auction and return the resulting cancellation."""
        canceller = self.canceller(self.request, self.context)
        cancellation = canceller.cancel()
        return cancellation
    def report(self):
        # NOTE(review): identical body to change() — possibly a distinct
        # reporting manager was intended here; confirm.
        manager = self.changion_manager(self.request, self.context)
        self.changed = manager.manage()
        return self.changed
    def change(self):
        """Apply a change via the changion_manager; record whether anything changed."""
        manager = self.changion_manager(self.request, self.context)
        self.changed = manager.manage()
        return self.changed
    def create(self, applicant):
        """Create a sub-resource for `applicant`; mark the auction changed on success."""
        manager = self.creation_manager(self.request, self.context)
        creature = manager.manage(applicant)
        if creature:
            self.changed = True
        return creature
    def log(self, action, verbose):
        # NOTE(review): `self.log(...)` calls this very method recursively
        # with (request, context) bound to (action, verbose) — infinite
        # recursion. A logger factory attribute (e.g. `self.logger`) was
        # probably intended; confirm and fix at the source.
        logger = self.log(self.request, self.context)
        logger.log(action, verbose)
    def get_representation_manager(self):
        """Return a representation manager bound to this request/context."""
        return self.representation_manager(self.request, self.context)
    def save(self):
        """Persist the auction only if a prior operation reported a change."""
        if self.changed:
            self.saved = save_auction(self.request)
        return self.saved
@implementer(IManager)
class BidManager(object):
    """Facade driving bid operations via strategy attributes.

    Factory attributes (Initializator, changion_manager,
    creation_manager, deletion_manager, representation_manager) are
    presumably supplied by subclasses/configuration — confirm.
    """
    def __init__(self, request, context):
        self.request = request
        self.context = context
        self.changed = False
        self.saved = False
    def initialize(self):
        """Run the bid initializator for this request/context."""
        initializator = self.Initializator(self.request, self.context)
        initializator.initialize()
    def change(self):
        """Apply a change via the changion_manager; record whether anything changed."""
        manager = self.changion_manager(self.request, self.context)
        self.changed = manager.manage()
        return self.changed
    def create(self, applicant):
        """Create a sub-resource for `applicant`; mark the bid changed on success."""
        manager = self.creation_manager(self.request, self.context)
        creature = manager.manage(applicant)
        if creature:
            self.changed = True
        return creature
    def log(self, action, verbose):
        # NOTE(review): same defect as AuctionManager.log — `self.log(...)`
        # recurses into this method with mis-bound arguments; a logger
        # factory attribute was probably intended. Confirm and fix.
        logger = self.log(self.request, self.context)
        logger.log(action, verbose)
    def save(self):
        """Persist the auction only if a prior operation reported a change."""
        if self.changed:
            self.saved = save_auction(self.request)
        return self.saved
    def delete(self):
        """Delete the bid; mark changed when the deletion manager reports success."""
        manager = self.deletion_manager(self.request, self.context)
        deleted = manager.manage()
        if deleted:
            self.changed = True
        return deleted
    def get_representation_manager(self):
        """Return a representation manager bound to this request/context."""
        return self.representation_manager(self.request, self.context)
@implementer(IManager)
class QuestionManager(object):
    """Manage change/save operations for an auction question."""

    def __init__(self, request, context):
        """Remember the request/context pair; nothing changed or saved yet."""
        self.request = request
        self.context = context
        self.changed = False
        self.saved = False

    def change(self):
        """Run the configured changion_manager and record its result."""
        changer = self.changion_manager(self.request, self.context)
        self.changed = changer.manage()
        return self.changed

    def save(self):
        """Persist the auction, but only when a change was recorded."""
        if not self.changed:
            return self.saved
        self.saved = save_auction(self.request)
        return self.saved
@implementer(IManager)
class ItemManager(object):
    """Facade driving item operations via strategy attributes.

    Factory attributes (changion_manager, creation_manager,
    representation_manager) are presumably supplied by
    subclasses/configuration — confirm.
    """
    def __init__(self, request, context):
        self.request = request
        self.context = context
        self.changed = False
        self.saved = False
    def change(self):
        """Apply a change via the changion_manager; record whether anything changed."""
        manager = self.changion_manager(self.request, self.context)
        self.changed = manager.manage()
        return self.changed
    def create(self, applicant):
        """Create a sub-resource for `applicant`; mark the item changed on success."""
        manager = self.creation_manager(self.request, self.context)
        creature = manager.manage(applicant)
        if creature:
            self.changed = True
        return creature
    def get_representation_manager(self):
        """Return a representation manager bound to this request/context."""
        return self.representation_manager(self.request, self.context)
    def log(self, action, verbose):
        # NOTE(review): same defect as AuctionManager.log — `self.log(...)`
        # recurses into this method with mis-bound arguments; a logger
        # factory attribute was probably intended. Confirm and fix.
        logger = self.log(self.request, self.context)
        logger.log(action, verbose)
    def save(self):
        """Persist the auction only if a prior operation reported a change."""
        if self.changed:
            self.saved = save_auction(self.request)
        return self.saved
@implementer(IManager)
class CancellationManager(object):
    """Facade driving cancellation operations via strategy attributes.

    Factory attributes (changion_manager, Representer,
    representation_manager, creation_manager) are presumably supplied
    by subclasses/configuration — confirm.
    """
    def __init__(self, request, context):
        self.request = request
        self.context = context
        self.changed = False
        self.saved = False
    def change(self):
        """Apply a change via the changion_manager; record whether anything changed."""
        manager = self.changion_manager(self.request, self.context)
        self.changed = manager.manage()
        return self.changed
    def represent(self, method):
        """Build a representation of the cancellation for `method`."""
        # NOTE(review): uses `self.Representer` (capitalized) while
        # CancellationDocumentManager uses `self.representer` — confirm
        # which attribute name the providers actually define.
        representer = self.Representer(self.context)
        return representer.represent(method)
    def get_representation_manager(self):
        """Return a representation manager bound to this request/context."""
        return self.representation_manager(self.request, self.context)
    def create(self, applicant):
        """Create a sub-resource for `applicant`; mark changed on success."""
        manager = self.creation_manager(self.request, self.context)
        created = manager.manage(applicant)
        if created:
            self.changed = True
        return created
    def log(self, action, verbose):
        # NOTE(review): same defect as AuctionManager.log — `self.log(...)`
        # recurses into this method with mis-bound arguments; a logger
        # factory attribute was probably intended. Confirm and fix.
        logger = self.log(self.request, self.context)
        logger.log(action, verbose)
    def save(self):
        """Persist the auction only if a prior operation reported a change."""
        if self.changed:
            self.saved = save_auction(self.request)
        return self.saved
@implementer(IManager)
class DocumentManager(object):
    """Manage change/put/save operations for an auction document."""

    def __init__(self, request, context):
        """Remember the request/context pair; nothing changed or saved yet."""
        self.request = request
        self.context = context
        self.changed = False
        self.saved = False

    def change(self):
        """Run the configured changion_manager and record its result."""
        changer = self.changion_manager(self.request, self.context)
        self.changed = changer.manage()
        return self.changed

    def put(self):
        """Replace the document; delegates to the same changion_manager as change()."""
        replacer = self.changion_manager(self.request, self.context)
        self.changed = replacer.manage()
        return self.changed

    def save(self):
        """Persist the auction, but only when a change was recorded."""
        if not self.changed:
            return self.saved
        self.saved = save_auction(self.request)
        return self.saved
@implementer(IManager)
class BidDocumentManager(object):
    """Manage change/save operations for a bid document."""

    def __init__(self, request, context):
        """Remember the request/context pair; nothing changed or saved yet."""
        self.request = request
        self.context = context
        self.changed = False
        self.saved = False

    def change(self):
        """Run the configured changion_manager and record its result."""
        changer = self.changion_manager(self.request, self.context)
        self.changed = changer.manage()
        return self.changed

    def save(self):
        """Persist the auction, but only when a change was recorded."""
        if not self.changed:
            return self.saved
        self.saved = save_auction(self.request)
        return self.saved
@implementer(IManager)
class CancellationDocumentManager(object):
    """Facade for cancellation-document operations.

    Factory attributes (representer) are presumably supplied by
    subclasses/configuration — confirm.
    """
    def __init__(self, request, context):
        self.request = request
        self.context = context
        self.changed = False
        self.saved = False
    def create(self, applicant):
        # NOTE(review): deliberately a no-op here (never sets
        # self.changed), unlike create() on the other managers — confirm
        # this is intended and not an unfinished stub.
        pass
    def save(self):
        """Persist the auction only if a prior operation reported a change."""
        if self.changed:
            self.saved = save_auction(self.request)
        return self.saved
    def log(self, action, verbose):
        # NOTE(review): same defect as AuctionManager.log — `self.log(...)`
        # recurses into this method with mis-bound arguments; a logger
        # factory attribute was probably intended. Confirm and fix.
        logger = self.log(self.request, self.context)
        logger.log(action, verbose)
    def represent(self, role):
        """Build a representation of the document for `role`."""
        # NOTE(review): lowercase `self.representer` vs
        # CancellationManager's `self.Representer` — confirm which name
        # the providers define.
        representer = self.representer(self.context)
        return representer.represent(role)
| 27.394052 | 70 | 0.642285 | 807 | 7,369 | 5.780669 | 0.089219 | 0.120257 | 0.13119 | 0.122615 | 0.810075 | 0.803859 | 0.803859 | 0.803859 | 0.776849 | 0.776849 | 0 | 0.000184 | 0.264351 | 7,369 | 268 | 71 | 27.496269 | 0.860358 | 0.00285 | 0 | 0.784314 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.220588 | false | 0.004902 | 0.014706 | 0.019608 | 0.416667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
fb1f7cde1eb262c038bccbbad6e7fb00bc2bad09 | 6,587 | py | Python | test/errors/python_error/python/PackageWithModuleClashingErrorTest.py | dkBrazz/zserio | 29dd8145b7d851fac682d3afe991185ea2eac318 | [
"BSD-3-Clause"
] | 86 | 2018-09-06T09:30:53.000Z | 2022-03-27T01:12:36.000Z | test/errors/python_error/python/PackageWithModuleClashingErrorTest.py | dkBrazz/zserio | 29dd8145b7d851fac682d3afe991185ea2eac318 | [
"BSD-3-Clause"
] | 362 | 2018-09-04T20:21:24.000Z | 2022-03-30T15:14:38.000Z | test/errors/python_error/python/PackageWithModuleClashingErrorTest.py | dkBrazz/zserio | 29dd8145b7d851fac682d3afe991185ea2eac318 | [
"BSD-3-Clause"
] | 20 | 2018-09-10T15:59:02.000Z | 2021-12-01T15:38:22.000Z | import unittest
from testutils import compileErroneousZserio, assertErrorsPresent
class PackageWithModuleClashingErrorTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.errors = {}
compileErroneousZserio(__file__, "package_with_module_clashing/package_with_bitmask_clash_error.zs",
cls.errors)
compileErroneousZserio(__file__, "package_with_module_clashing/package_with_constant_clash_error.zs",
cls.errors)
compileErroneousZserio(__file__, "package_with_module_clashing/package_with_enum_clash_error.zs",
cls.errors)
compileErroneousZserio(__file__,
"package_with_module_clashing/package_with_instantiate_type_clash_error.zs",
cls.errors)
compileErroneousZserio(__file__,
"package_with_module_clashing/package_with_instantiation_clash_error.zs",
cls.errors)
compileErroneousZserio(__file__, "package_with_module_clashing/package_with_pubsub_clash_error.zs",
cls.errors)
compileErroneousZserio(__file__, "package_with_module_clashing/package_with_service_clash_error.zs",
cls.errors)
compileErroneousZserio(__file__, "package_with_module_clashing/package_with_structure_clash_error.zs",
cls.errors)
compileErroneousZserio(__file__, "package_with_module_clashing/package_with_subtype_clash_error.zs",
cls.errors)
def testPackageWithBitmaskClash(self):
assertErrorsPresent(self,
"package_with_module_clashing/package_with_bitmask_clash_error.zs",
[
":5:15: Module " +
"'package_with_module_clashing.package_with_bitmask_clash_error.clashing_name' " +
"generated for package symbol 'ClashingName' clashes with equally named generated package!",
"[ERROR] Python Generator: Package with module name clashing detected!"
]
)
def testPackageWithConstantClash(self):
assertErrorsPresent(self,
"package_with_module_clashing/package_with_constant_clash_error.zs",
[
":5:14: Module " +
"'package_with_module_clashing.package_with_constant_clash_error.clashing_name' " +
"generated for package symbol 'CLASHING_NAME' clashes with equally named generated package!",
"[ERROR] Python Generator: Package with module name clashing detected!"
]
)
def testPackageWithEnumClash(self):
assertErrorsPresent(self,
"package_with_module_clashing/package_with_enum_clash_error.zs",
[
":5:12: Module " +
"'package_with_module_clashing.package_with_enum_clash_error.clashing_name' " +
"generated for package symbol 'ClashingName' clashes with equally named generated package!",
"[ERROR] Python Generator: Package with module name clashing detected!"
]
)
def testPackageWithInstantiateTypeClash(self):
assertErrorsPresent(self,
"package_with_module_clashing/package_with_instantiate_type_clash_error.zs",
[
":18:13: In instantiation of 'Some' required from here",
":5:8: Module " +
"'package_with_module_clashing.package_with_instantiate_type_clash_error.clashing_name' " +
"generated for package symbol 'ClashingName' clashes with equally named generated package!",
"[ERROR] Python Generator: Package with module name clashing detected!"
]
)
def testPackageWithInstantiationClash(self):
assertErrorsPresent(self,
"package_with_module_clashing/package_with_instantiation_clash_error.zs",
[
":16:9: In instantiation of 'Clashing' required from here",
":5:7: Module " +
"'package_with_module_clashing.package_with_instantiation_clash_error.clashing_name' " +
"generated for package symbol 'Clashing_Name' clashes with equally named generated package!",
"[ERROR] Python Generator: Package with module name clashing detected!"
]
)
def testPackageWithPubsubClash(self):
assertErrorsPresent(self,
"package_with_module_clashing/package_with_pubsub_clash_error.zs",
[
":5:8: Module " +
"'package_with_module_clashing.package_with_pubsub_clash_error.clashing_name' " +
"generated for package symbol 'ClashingName' clashes with equally named generated package!",
"[ERROR] Python Generator: Package with module name clashing detected!"
]
)
def testPackageWithServiceClash(self):
assertErrorsPresent(self,
"package_with_module_clashing/package_with_service_clash_error.zs",
[
":5:9: Module " +
"'package_with_module_clashing.package_with_service_clash_error.clashing_name' " +
"generated for package symbol 'ClashingName' clashes with equally named generated package!",
"[ERROR] Python Generator: Package with module name clashing detected!"
]
)
def testPackageWithStructureClash(self):
assertErrorsPresent(self,
"package_with_module_clashing/package_with_structure_clash_error.zs",
[
":5:8: Module " +
"'package_with_module_clashing.package_with_structure_clash_error.clashing_name' " +
"generated for package symbol 'ClashingName' clashes with equally named generated package!",
"[ERROR] Python Generator: Package with module name clashing detected!"
]
)
def testPackageWithSubtypeClash(self):
assertErrorsPresent(self,
"package_with_module_clashing/package_with_subtype_clash_error.zs",
[
":11:18: Module " +
"'package_with_module_clashing.package_with_subtype_clash_error.clashing_name' " +
"generated for package symbol 'ClashingName' clashes with equally named generated package!",
"[ERROR] Python Generator: Package with module name clashing detected!"
]
)
| 50.282443 | 110 | 0.640352 | 607 | 6,587 | 6.555189 | 0.113674 | 0.174164 | 0.153807 | 0.169641 | 0.870068 | 0.867806 | 0.867806 | 0.858758 | 0.858758 | 0.726062 | 0 | 0.006424 | 0.291028 | 6,587 | 130 | 111 | 50.669231 | 0.84561 | 0 | 0 | 0.364407 | 0 | 0 | 0.539092 | 0.286473 | 0 | 0 | 0 | 0 | 0.084746 | 1 | 0.084746 | false | 0 | 0.016949 | 0 | 0.110169 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
55185b6becab64f29a843cfc1570390403aea9c2 | 412 | py | Python | preggy/assertions/__init__.py | rfloriano/preggy | 6226ea4a1ff8614aee97d60fb3193149e834a555 | [
"Unlicense",
"MIT"
] | 10 | 2015-02-26T13:37:08.000Z | 2020-04-08T14:11:27.000Z | preggy/assertions/__init__.py | rfloriano/preggy | 6226ea4a1ff8614aee97d60fb3193149e834a555 | [
"Unlicense",
"MIT"
] | 9 | 2015-03-02T16:04:04.000Z | 2021-04-20T17:47:00.000Z | preggy/assertions/__init__.py | rfloriano/preggy | 6226ea4a1ff8614aee97d60fb3193149e834a555 | [
"Unlicense",
"MIT"
] | 5 | 2015-02-05T15:59:48.000Z | 2020-03-18T14:28:11.000Z | # -*- coding: utf-8 -*-
from __future__ import absolute_import # NOQA
from preggy.assertions.emptiness import * # NOQA
from preggy.assertions.equality import * # NOQA
from preggy.assertions.inclusion import * # NOQA
from preggy.assertions.length import * # NOQA
from preggy.assertions.like import * # NOQA
from preggy.assertions.comparison import * # NOQA
from preggy.assertions.types import * # NOQA
| 31.692308 | 50 | 0.752427 | 51 | 412 | 5.980392 | 0.333333 | 0.262295 | 0.321311 | 0.459016 | 0.688525 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002874 | 0.15534 | 412 | 12 | 51 | 34.333333 | 0.873563 | 0.148058 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.875 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
551e7b91b8a6cf68920712a235a82ef9c6749a69 | 277 | py | Python | scripts/release_test/tests/__init__.py | rec/leds | ed5fd11ed155e7008d4ef6d5b3d82cd7f8b3ed6a | [
"MIT"
] | 253 | 2015-01-03T23:17:57.000Z | 2021-12-14T02:31:08.000Z | scripts/release_test/tests/__init__.py | rec/leds | ed5fd11ed155e7008d4ef6d5b3d82cd7f8b3ed6a | [
"MIT"
] | 879 | 2015-01-11T16:07:25.000Z | 2021-12-10T16:24:31.000Z | scripts/release_test/tests/__init__.py | rec/leds | ed5fd11ed155e7008d4ef6d5b3d82cd7f8b3ed6a | [
"MIT"
] | 71 | 2015-01-04T01:02:47.000Z | 2022-03-25T18:30:10.000Z | from . import (
classic, builder, demo, new, unit, simpixel, rest, remote,
keyboard, j12k, midi, all_pixel, bp, list)
__all__ = [
'classic', 'builder', 'demo', 'new', 'unit', 'simpixel', 'rest', 'remote',
'keyboard', 'j12k', 'midi', 'all_pixel', 'bp', 'list']
| 34.625 | 78 | 0.595668 | 33 | 277 | 4.818182 | 0.515152 | 0.176101 | 0.226415 | 0.264151 | 0.918239 | 0.918239 | 0.918239 | 0.918239 | 0.918239 | 0.918239 | 0 | 0.017778 | 0.187726 | 277 | 7 | 79 | 39.571429 | 0.688889 | 0 | 0 | 0 | 0 | 0 | 0.267148 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
9b2bc08f4d085a5169116a20e2be0819f358d92e | 1,803 | py | Python | 005 Saida Simples/losangos2.py | yamadathamine/300ideiasparaprogramarPython | 331a063bbf8bcd117ae5a34324b8176a6014fc98 | [
"MIT"
] | null | null | null | 005 Saida Simples/losangos2.py | yamadathamine/300ideiasparaprogramarPython | 331a063bbf8bcd117ae5a34324b8176a6014fc98 | [
"MIT"
] | 4 | 2020-06-09T19:10:04.000Z | 2020-06-17T18:23:47.000Z | 005 Saida Simples/losangos2.py | yamadathamine/300ideiasparaprogramarPython | 331a063bbf8bcd117ae5a34324b8176a6014fc98 | [
"MIT"
] | null | null | null | # encoding: utf-8
# usando python 3
# Losangos 2 - No programa do exercício anterior,
# troque o caractere de forma que os losangos sejam feitos com asteriscos (*).
import os
l_inicial = input("Digite a posicão da linha: ")
c_inicial = input("Digite a posição da coluna: ")
os.system('clear')
linha = int(l_inicial)
coluna = int(c_inicial)
print("\033["+str(linha)+";"+str(coluna)+"H*")
coluna+=15
print("\033["+str(linha)+";"+str(coluna)+"H*")
linha+=1
coluna = int(c_inicial)-1
print("\033["+str(linha)+";"+str(coluna)+"H***")
coluna = int(c_inicial)+14
print("\033["+str(linha)+";"+str(coluna)+"H***")
linha+=1
coluna = int(c_inicial)-2
print("\033["+str(linha)+";"+str(coluna)+"H*****")
coluna = int(c_inicial)+13
print("\033["+str(linha)+";"+str(coluna)+"H*****")
linha+=1
coluna = int(c_inicial)-3
print("\033["+str(linha)+";"+str(coluna)+"H*******")
coluna = int(c_inicial)+12
print("\033["+str(linha)+";"+str(coluna)+"H*******")
linha+=1
coluna = int(c_inicial)-4
print("\033["+str(linha)+";"+str(coluna)+"H*********")
coluna = int(c_inicial)+11
print("\033["+str(linha)+";"+str(coluna)+"H*********")
linha+=1
coluna = int(c_inicial)-3
print("\033["+str(linha)+";"+str(coluna)+"H*******")
coluna = int(c_inicial)+12
print("\033["+str(linha)+";"+str(coluna)+"H*******")
linha+=1
coluna = int(c_inicial)-2
print("\033["+str(linha)+";"+str(coluna)+"H*****")
coluna = int(c_inicial)+13
print("\033["+str(linha)+";"+str(coluna)+"H*****")
linha+=1
coluna = int(c_inicial)-1
print("\033["+str(linha)+";"+str(coluna)+"H***")
coluna = int(c_inicial)+14
print("\033["+str(linha)+";"+str(coluna)+"H***")
linha+=1
coluna = int(c_inicial)
print("\033["+str(linha)+";"+str(coluna)+"H*")
coluna+=15
print("\033["+str(linha)+";"+str(coluna)+"H*")
linha = int(l_inicial) + 15
print("\033["+str(linha)+";0H") | 31.086207 | 78 | 0.615641 | 284 | 1,803 | 3.838028 | 0.179577 | 0.13945 | 0.191743 | 0.278899 | 0.775229 | 0.758716 | 0.758716 | 0.758716 | 0.758716 | 0.758716 | 0 | 0.057658 | 0.076539 | 1,803 | 58 | 79 | 31.086207 | 0.596997 | 0.087632 | 0 | 0.823529 | 0 | 0 | 0.16819 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.019608 | 0 | 0.019608 | 0.372549 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
9b81ed315de41b41504b26601645c13d7a9ee229 | 1,140 | py | Python | test/strings/format16.py | kylebarron/MagicPython | da6fa0793e2c85d3bf7709ff1d4f65ccf468db11 | [
"MIT"
] | 1,482 | 2015-10-16T21:59:32.000Z | 2022-03-30T11:44:40.000Z | test/strings/format16.py | kylebarron/MagicPython | da6fa0793e2c85d3bf7709ff1d4f65ccf468db11 | [
"MIT"
] | 226 | 2015-10-15T15:53:44.000Z | 2022-03-25T03:08:27.000Z | test/strings/format16.py | kylebarron/MagicPython | da6fa0793e2c85d3bf7709ff1d4f65ccf468db11 | [
"MIT"
] | 129 | 2015-10-20T02:41:49.000Z | 2022-03-22T01:44:36.000Z | a = b'%b' % b'foo'
a : source.python
: source.python
= : keyword.operator.assignment.python, source.python
: source.python
b : source.python, storage.type.string.python, string.quoted.binary.single.python
' : punctuation.definition.string.begin.python, source.python, string.quoted.binary.single.python
%b : constant.character.format.placeholder.other.python, meta.format.percent.python, source.python, string.quoted.binary.single.python
' : punctuation.definition.string.end.python, source.python, string.quoted.binary.single.python
: source.python
% : keyword.operator.arithmetic.python, source.python
: source.python
b : source.python, storage.type.string.python, string.quoted.binary.single.python
' : punctuation.definition.string.begin.python, source.python, string.quoted.binary.single.python
foo : source.python, string.quoted.binary.single.python
' : punctuation.definition.string.end.python, source.python, string.quoted.binary.single.python
| 57 | 145 | 0.664912 | 126 | 1,140 | 6.015873 | 0.206349 | 0.237467 | 0.261214 | 0.253298 | 0.868074 | 0.788918 | 0.788918 | 0.788918 | 0.781003 | 0.781003 | 0 | 0 | 0.221053 | 1,140 | 19 | 146 | 60 | 0.853604 | 0 | 0 | 0.625 | 0 | 0.1875 | 0.004386 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
9ba536de6114d6b7adaf3c8758157aefca6cd25e | 112 | py | Python | tests/agent_unittests/_test_import_hook.py | lrafeei/newrelic-python-agent | 3dbf080d4104514e49ad8e1d06abac75b6914ee1 | [
"Apache-2.0"
] | null | null | null | tests/agent_unittests/_test_import_hook.py | lrafeei/newrelic-python-agent | 3dbf080d4104514e49ad8e1d06abac75b6914ee1 | [
"Apache-2.0"
] | 1 | 2021-07-30T18:31:14.000Z | 2021-07-30T18:31:14.000Z | tests/agent_unittests/_test_import_hook.py | lrafeei/newrelic-python-agent | 3dbf080d4104514e49ad8e1d06abac75b6914ee1 | [
"Apache-2.0"
] | null | null | null | def run():
pass
class A(object):
def run():
pass
class B(object):
def run():
pass
| 10.181818 | 16 | 0.482143 | 15 | 112 | 3.6 | 0.466667 | 0.333333 | 0.555556 | 0.555556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.383929 | 112 | 10 | 17 | 11.2 | 0.782609 | 0 | 0 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.375 | true | 0.375 | 0 | 0 | 0.625 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 9 |
32fc9c1574bd3dea4c6525b620cde71eb6209a08 | 178 | py | Python | backend/api/converters.py | PostHog/whereintheworld | 6b9f5472d264c6eb9f56fd8bcff08b2aa7f9b20a | [
"MIT"
] | 5 | 2021-09-01T13:10:53.000Z | 2022-01-25T19:37:36.000Z | backend/api/converters.py | PostHog/whereintheworld | 6b9f5472d264c6eb9f56fd8bcff08b2aa7f9b20a | [
"MIT"
] | 3 | 2021-10-03T13:35:20.000Z | 2022-01-21T01:26:51.000Z | backend/api/converters.py | PostHog/whereintheworld | 6b9f5472d264c6eb9f56fd8bcff08b2aa7f9b20a | [
"MIT"
] | 2 | 2022-01-26T07:32:24.000Z | 2022-03-19T17:21:10.000Z | class TransactionalIDConverter:
regex = "[a-z]{2,5}_[a-zA-Z0-9]{24}"
def to_python(self, value):
return value
def to_url(self, value):
return value
| 19.777778 | 40 | 0.606742 | 26 | 178 | 4.038462 | 0.692308 | 0.095238 | 0.285714 | 0.380952 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.045113 | 0.252809 | 178 | 8 | 41 | 22.25 | 0.744361 | 0 | 0 | 0.333333 | 0 | 0.166667 | 0.146067 | 0.146067 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
fd2cb1c645a2ea6baaf9a17c3bd7e621733e9f6f | 162 | py | Python | pypif/obj/system/chemical/__init__.py | ventura-rivera/pypif | 42b40b0a4f80ccf909c9ff8dcc337f726b21be60 | [
"Apache-2.0"
] | 9 | 2016-09-07T19:36:47.000Z | 2022-01-03T13:17:25.000Z | pypif/obj/system/chemical/__init__.py | ventura-rivera/pypif | 42b40b0a4f80ccf909c9ff8dcc337f726b21be60 | [
"Apache-2.0"
] | 20 | 2016-08-22T20:24:28.000Z | 2017-11-28T22:18:47.000Z | pypif/obj/system/chemical/__init__.py | ventura-rivera/pypif | 42b40b0a4f80ccf909c9ff8dcc337f726b21be60 | [
"Apache-2.0"
] | 13 | 2016-01-08T21:09:48.000Z | 2020-04-30T22:13:28.000Z | from pypif.obj.system.chemical.alloy import *
from pypif.obj.system.chemical.common import *
from pypif.obj.system.chemical.chemical_system import ChemicalSystem
| 40.5 | 68 | 0.839506 | 23 | 162 | 5.869565 | 0.391304 | 0.2 | 0.266667 | 0.4 | 0.666667 | 0.474074 | 0 | 0 | 0 | 0 | 0 | 0 | 0.074074 | 162 | 3 | 69 | 54 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
fd621d9341c47e7ed674e46d123b50d0552c0cbc | 11,422 | py | Python | model/SLO.py | chasebk/code_ISLO_ELM | 05194db44d888a7c55daec36cdcf153ab03553d8 | [
"Apache-2.0"
] | 1 | 2022-01-15T15:30:10.000Z | 2022-01-15T15:30:10.000Z | model/SLO.py | chasebk/code_ISLO_ELM | 05194db44d888a7c55daec36cdcf153ab03553d8 | [
"Apache-2.0"
] | 1 | 2022-01-16T14:36:11.000Z | 2022-01-16T14:36:11.000Z | model/SLO.py | chasebk/code_ISLO_ELM | 05194db44d888a7c55daec36cdcf153ab03553d8 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# ------------------------------------------------------------------------------------------------------%
# Created by "Thieu Nguyen" at 10:47, 07/04/2020 %
# %
# Email: nguyenthieu2102@gmail.com %
# Homepage: https://www.researchgate.net/profile/Thieu_Nguyen6 %
# Github: https://github.com/thieu1995 %
# ------------------------------------------------------------------------------------------------------%
from numpy.random import uniform, randint, normal, random, choice, rand
from numpy import abs, sign, cos, pi, sin, sqrt, power
from math import gamma
from copy import deepcopy
from mealpy.root import Root
class BaseSLO(Root):
"""
The original version of: Sea Lion Optimization Algorithm (SLO)
(Sea Lion Optimization Algorithm)
Link:
https://www.researchgate.net/publication/333516932_Sea_Lion_Optimization_Algorithm
DOI: 10.14569/IJACSA.2019.0100548
Notes:
+ The original paper is dummy, tons of unclear equations and parameters
+ You can check my question on the ResearchGate link, the authors seem to be scare so they didn't reply.
"""
def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100, **kwargs):
super().__init__(obj_func, lb, ub, verbose, kwargs)
self.epoch = epoch
self.pop_size = pop_size
def train(self):
pop = [self.create_solution() for _ in range(self.pop_size)]
g_best = self.get_global_best_solution(pop, self.ID_FIT, self.ID_MIN_PROB)
for epoch in range(self.epoch):
c = 2 - 2 * epoch / self.epoch
t0 = rand()
v1 = sin(2 * pi * t0)
v2 = sin(2 * pi * (1 - t0))
SP_leader = abs(v1 * (1 + v2) / v2)
for i in range(self.pop_size):
if SP_leader < 0.25:
if c < 1:
pos_new = g_best[self.ID_POS] - c * abs(2*rand()*g_best[self.ID_POS] - pop[i][self.ID_POS])
else:
ri = choice(list(set(range(0, self.pop_size)) - {i})) # random index
pos_new = pop[ri][self.ID_POS] - c * abs(2 * rand() * pop[ri][self.ID_POS] - pop[i][self.ID_POS])
else:
pos_new = abs(g_best[self.ID_POS] - pop[i][self.ID_POS]) * cos(2*pi*uniform(-1, 1)) + g_best[self.ID_POS]
pos_new = self.amend_position_random_faster(pos_new)
fit = self.get_fitness_position(pos_new)
pop[i] = [pos_new, fit]
g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
self.loss_train.append(g_best[self.ID_FIT])
if self.verbose:
print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
self.solution = g_best
return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class ImprovedSLO(Root):
"""
My improved version of: Improved Sea Lion Optimization Algorithm (ISLO)
This version based on Levy-flight
Link:
https://www.researchgate.net/publication/333516932_Sea_Lion_Optimization_Algorithm
DOI: 10.14569/IJACSA.2019.0100548
"""
ID_LOC_POS = 2
ID_LOC_FIT = 3
def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100, c1=2, c2=2, **kwargs):
super().__init__(obj_func, lb, ub, verbose, kwargs)
self.epoch = epoch
self.pop_size = pop_size
self.c1 = c1
self.c2 = c2
def create_solution(self, minmax=0):
position = uniform(self.lb, self.ub)
fitness = self.get_fitness_position(position=position, minmax=minmax)
local_pos = self.lb + self.ub - position
local_fit = self.get_fitness_position(position=local_pos, minmax=minmax)
if fitness < local_fit:
return [local_pos, local_fit, position, fitness]
else:
return [position, fitness, local_pos, local_fit]
def train(self):
pop = [self.create_solution() for _ in range(self.pop_size)]
g_best = self.get_global_best_solution(pop, self.ID_FIT, self.ID_MIN_PROB)
for epoch in range(self.epoch):
c = 2 - 2 * epoch / self.epoch
t0 = rand()
v1 = sin(2 * pi * t0) # -1, 1
v2 = sin(2 * pi * (1 - t0)) # -1, 1
SP_leader = abs(v1 * (1 + v2) / v2)
for i in range(self.pop_size):
if SP_leader < 1:
if c > 1: # Exploration
dif1 = 2 * rand() * g_best[self.ID_POS] - pop[i][self.ID_POS]
dif2 = 2 * rand() * pop[i][self.ID_LOC_POS] - pop[i][self.ID_POS]
pos_new = pop[i][self.ID_POS] + c * dif1 + c * dif2
# dif1 = abs(g_best[self.ID_POS] - pop[i][self.ID_POS])
# dif2 = abs(pop[i][self.ID_LOC_POS] - pop[i][self.ID_POS])
# pos_new = pop[i][self.ID_POS] + \
# self.c1 * rand() * (pop[i][self.ID_POS] - c * dif1) + self.c2 * rand() * (pop[i][self.ID_POS] - c * dif2)
else: # Exploitation
pos_new = g_best[self.ID_POS] + c * normal(0, 1) * (2 * rand() * g_best[self.ID_POS] - pop[i][self.ID_POS])
fit_new = self.get_fitness_position(pos_new)
pos_new_oppo = self.lb + self.ub - g_best[self.ID_POS] + rand() * (g_best[self.ID_POS] - pos_new)
fit_new_oppo = self.get_fitness_position(pos_new_oppo)
if fit_new_oppo < fit_new:
pos_new = pos_new_oppo
else:
if rand() < 0.5: # Exploitation
pos_new = g_best[self.ID_POS] + cos(2 * pi * uniform(-1, 1)) * abs(g_best[self.ID_POS] - pop[i][self.ID_POS])
else: # Exploration
pos_new = self.levy_flight(epoch=epoch, position=pop[i][self.ID_POS], g_best_position=g_best[self.ID_POS])
# pos_new = pop[i][self.ID_POS] + self.step_size_by_levy_flight(0.01, 1.5, 2) * (g_best[self.ID_POS] - pop[i][self.ID_POS])
pos_new = self.amend_position_random_faster(pos_new)
fit = self.get_fitness_position(pos_new)
if fit < pop[i][self.ID_LOC_FIT]:
pop[i] = [pos_new, fit, deepcopy(pos_new), deepcopy(fit)]
else:
pop[i][self.ID_POS] = pos_new
pop[i][self.ID_FIT] = fit
g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
self.loss_train.append(g_best[self.ID_FIT])
if self.verbose:
print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
self.solution = g_best
return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class ISLO(Root):
"""
My improved version of: Improved Sea Lion Optimization Algorithm (ISLO)
(Sea Lion Optimization Algorithm)
Link:
https://www.researchgate.net/publication/333516932_Sea_Lion_Optimization_Algorithm
DOI: 10.14569/IJACSA.2019.0100548
"""
ID_POS_LOC = 2
ID_POS_FIT = 3
def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100, c1=1.2, c2=1.2, **kwargs):
super().__init__(obj_func, lb, ub, verbose, kwargs)
self.epoch = epoch
self.pop_size = pop_size
self.c1 = c1
self.c2 = c2
def create_solution(self, minmax=0):
position = uniform(self.lb, self.ub)
fitness = self.get_fitness_position(position=position, minmax=minmax)
local_pos = self.lb + self.ub - position
local_fit = self.get_fitness_position(position=local_pos, minmax=minmax)
if fitness < local_fit:
return [local_pos, local_fit, position, fitness]
else:
return [position, fitness, local_pos, local_fit]
def train(self):
pop = [self.create_solution() for _ in range(self.pop_size)]
g_best = self.get_global_best_solution(pop, self.ID_FIT, self.ID_MIN_PROB)
for epoch in range(self.epoch):
c = 2 - 2 * epoch / self.epoch
t0 = rand()
v1 = sin(2 * pi * t0)
v2 = sin(2 * pi * (1 - t0))
SP_leader = abs(v1 * (1 + v2) / v2)
for i in range(self.pop_size):
if SP_leader < 0.5:
if c < 1: # Exploitation
# pos_new = g_best[self.ID_POS] - c * abs(2 * rand() * g_best[self.ID_POS] - pop[i][self.ID_POS])
dif1 = abs(2 * rand() * g_best[self.ID_POS] - pop[i][self.ID_POS])
dif2 = abs(2 * rand() * pop[i][self.ID_POS_LOC] - pop[i][self.ID_POS])
pos_new = self.c1 * rand() * (pop[i][self.ID_POS] - c * dif1) + self.c2*rand() *(pop[i][self.ID_POS] - c*dif2)
else: # Exploration
# ri = choice(list(set(range(0, self.pop_size)) - {i})) # random index
# pos_new = pop[ri][self.ID_POS] - c * abs(2 * rand() * pop[ri][self.ID_POS] - pop[i][self.ID_POS])
pos_new = g_best[self.ID_POS] + c * normal(0, 1, self.problem_size) * (g_best[self.ID_POS] - pop[i][self.ID_POS])
fit_new = self.get_fitness_position(pos_new)
pos_new_oppo = self.lb + self.ub - g_best[self.ID_POS] + rand() * (g_best[self.ID_POS] - pos_new)
fit_new_oppo = self.get_fitness_position(pos_new_oppo)
if fit_new_oppo < fit_new:
pos_new = pos_new_oppo
# elif 0.5 <= SP_leader <= 1:
# # pos_new = self.levy_flight(epoch=epoch, position=pop[i][self.ID_POS], g_best_position=g_best[self.ID_POS])
# pos_new = pop[i][self.ID_POS] + self.step_size_by_levy_flight(case=3)
else:
pos_new = abs(g_best[self.ID_POS] - pop[i][self.ID_POS]) * cos(2 * pi * uniform(-1, 1)) + g_best[self.ID_POS]
pos_new = self.amend_position_random_faster(pos_new)
fit = self.get_fitness_position(pos_new)
if fit < pop[i][self.ID_POS_FIT]:
pop[i] = [pos_new, fit, deepcopy(pos_new), deepcopy(fit)]
else:
pop[i][self.ID_POS] = pos_new
pop[i][self.ID_FIT] = fit
g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
self.loss_train.append(g_best[self.ID_FIT])
if self.verbose:
print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
self.solution = g_best
return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
| 52.394495 | 147 | 0.528016 | 1,542 | 11,422 | 3.668612 | 0.114786 | 0.089093 | 0.097048 | 0.070002 | 0.871133 | 0.868835 | 0.862295 | 0.861411 | 0.853986 | 0.845501 | 0 | 0.033505 | 0.336281 | 11,422 | 217 | 148 | 52.635945 | 0.712703 | 0.228419 | 0 | 0.76 | 0 | 0 | 0.008716 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.053333 | false | 0 | 0.033333 | 0 | 0.18 | 0.02 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bd2ba2e9baff9573d80d66d785aa67330f31239b | 24,323 | py | Python | test/integration/test_findings_v1.py | ibm-cloud-security/scc-python-sdk | 7e08c54ddfa69bbd7d1ffc632c722f1cb786f067 | [
"Apache-2.0"
] | 2 | 2021-06-07T13:31:21.000Z | 2022-02-15T05:05:52.000Z | test/integration/test_findings_v1.py | IBM/scc-python-sdk | 7e08c54ddfa69bbd7d1ffc632c722f1cb786f067 | [
"Apache-2.0"
] | 18 | 2021-06-04T10:44:32.000Z | 2022-03-14T05:41:06.000Z | test/integration/test_findings_v1.py | ibm-cloud-security/scc-python-sdk | 7e08c54ddfa69bbd7d1ffc632c722f1cb786f067 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# (C) Copyright IBM Corp. 2021.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Integration Tests for FindingsV1
"""
import os
import pytest
from ibm_cloud_sdk_core import *
from ibm_scc.findings_v1 import *
import logging
import time
# Name of the env-style configuration file the SDK credential loader reads.
config_file = 'findings_v1.env'

# Target account / provider for this integration run; PROVIDER_ID falls back
# to a fixed SDK integration-test provider when the env var is unset.
account_id = os.getenv("ACCOUNT_ID")
provider_id = os.getenv("PROVIDER_ID", "sdk-it")

# Placeholder value reused across request payloads.
testString = "testString"

# Unique per-run suffix built from the epoch seconds, e.g. "py-1618403029";
# used to tag created resources so teardown can find and delete them.
identifier = "py-" + str(time.time()).split(".")[0]
class TestFindingsV1():
"""
Integration Test Class for FindingsV1
"""
@classmethod
def setup_class(cls):
if os.path.exists(config_file):
os.environ['IBM_CREDENTIALS_FILE'] = config_file
cls.findings_service = FindingsV1.new_instance(
account_id=account_id
)
assert cls.findings_service is not None
cls.config = read_external_sources(
FindingsV1.DEFAULT_SERVICE_NAME)
assert cls.config is not None
cls.findings_service.enable_retries()
print('Setup complete.')
@classmethod
def teardown_class(cls):
if os.path.exists(config_file):
os.environ['IBM_CREDENTIALS_FILE'] = config_file
cls.findings_service = FindingsV1.new_instance(
account_id=account_id,
)
assert cls.findings_service is not None
cls.config = read_external_sources(
FindingsV1.DEFAULT_SERVICE_NAME)
assert cls.config is not None
print('Setup complete.')
print(f"cleaning up account: {account_id} with provider: {provider_id}\n")
list_notes_response = cls.findings_service.list_notes(
provider_id=provider_id,
)
for note in list_notes_response.get_result()["notes"]:
parts = note["id"].split("-")
if f"{parts[len(parts)-2]}-{parts[len(parts)-1]}" == identifier:
cls.findings_service.delete_note(
provider_id=provider_id,
note_id=note["id"],
)
list_occurrences_response = cls.findings_service.list_occurrences(
provider_id=provider_id,
)
for occurrence in list_occurrences_response.get_result()["occurrences"]:
parts = occurrence["id"].split("-")
if f"{parts[len(parts)-2]}-{parts[len(parts)-1]}" == identifier:
cls.findings_service.delete_occurrence(
provider_id=provider_id,
occurrence_id=occurrence["id"],
)
print("cleanup was successful\n")
list_providers_response = cls.findings_service.list_providers(
)
for provider in list_providers_response.get_result()["providers"]:
if provider["id"] == provider_id:
print(f"seems like account has some resources left even after a successful cleanup, please consider manual cleanup for account: {account_id} and provider: {provider_id}\n")
needscredentials = pytest.mark.skipif(
not os.path.exists(config_file), reason="External configuration not available, skipping..."
)
@needscredentials
def test_post_graph(self):
post_graph_response = self.findings_service.post_graph(
body='{notes{id}}',
content_type='application/graphql',
)
assert post_graph_response.get_status_code() == 200
@needscredentials
def test_create_note_finding(self):
    """Create a FINDING-kind note and verify a 200 response with a payload."""
    # Reporter model
    reporter_model = dict(id=testString, title=testString, url=testString)
    # ApiNoteRelatedUrl model
    related_url_model = dict(label=testString, url=testString)
    # RemediationStep model
    remediation_step_model = dict(title=testString, url=testString)
    # FindingType model referencing the remediation step
    finding_type_model = dict(severity='LOW', next_steps=[remediation_step_model])

    response = self.findings_service.create_note(
        provider_id=provider_id,
        short_description=testString,
        long_description=testString,
        kind='FINDING',
        id=f'finding-note-{identifier}',
        reported_by=reporter_model,
        related_url=[related_url_model],
        shared=True,
        finding=finding_type_model,
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_create_note_kpi(self):
    """Create a KPI-kind note and verify a 200 response with a payload."""
    # Reporter model
    reporter_model = dict(id=testString, title=testString, url=testString)
    # KpiType model
    kpi_type_model = dict(aggregation_type='SUM', severity="HIGH")

    response = self.findings_service.create_note(
        provider_id=provider_id,
        short_description=testString,
        long_description=testString,
        kind='KPI',
        id=f'kpi-note-{identifier}',
        reported_by=reporter_model,
        shared=True,
        kpi=kpi_type_model,
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
# Note (CARD)
@needscredentials
def test_create_note_card(self):
    """Create a CARD-kind note carrying a TIME_SERIES element."""
    # Reporter model
    reporter_model = dict(id=testString, title=testString, url=testString)
    # ValueTypeFindingCountValueType model
    value_type_model = dict(
        kind='FINDING_COUNT',
        finding_note_names=[testString],
        text=testString,
    )
    # CardElementTimeSeriesCardElement model wrapping the value type
    card_element_model = dict(
        text=testString,
        default_interval=testString,
        kind='TIME_SERIES',
        default_time_range='1d',
        value_types=[value_type_model],
    )
    # Card model wrapping the element
    card_model = dict(
        section=testString,
        title=testString,
        subtitle=testString,
        finding_note_names=[testString],
        requires_configuration=True,
        badge_text=testString,
        badge_image=testString,
        elements=[card_element_model],
    )

    response = self.findings_service.create_note(
        provider_id=provider_id,
        short_description=testString,
        long_description=testString,
        kind='CARD',
        id=f'card-note-{identifier}',
        reported_by=reporter_model,
        shared=True,
        card=card_model,
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_create_note_section(self):
    """Create a SECTION-kind note and verify a 200 response with a payload."""
    # Reporter model
    reporter_model = dict(id=testString, title=testString, url=testString)
    # Section model
    section_model = dict(title=testString, image=testString)

    response = self.findings_service.create_note(
        provider_id=provider_id,
        short_description=testString,
        long_description=testString,
        kind='SECTION',
        id=f'section-note-{identifier}',
        reported_by=reporter_model,
        shared=True,
        section=section_model,
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_list_notes(self):
    """List the provider's notes and expect a 200 response with a payload."""
    response = self.findings_service.list_notes(
        provider_id=provider_id,
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_get_note(self):
    """Fetch the section note created earlier and expect it back with 200."""
    response = self.findings_service.get_note(
        provider_id=provider_id,
        note_id=f'section-note-{identifier}',
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_update_note_finding(self):
    """Update the FINDING-kind note in place and expect HTTP 200."""
    # Reporter model
    reporter_model = dict(id=testString, title=testString, url=testString)
    # ApiNoteRelatedUrl model
    related_url_model = dict(label=testString, url=testString)
    # RemediationStep model
    remediation_step_model = dict(title=testString, url=testString)
    # FindingType model referencing the remediation step
    finding_type_model = dict(severity='LOW', next_steps=[remediation_step_model])

    response = self.findings_service.update_note(
        provider_id=provider_id,
        note_id=f'finding-note-{identifier}',
        short_description=testString,
        long_description=testString,
        kind='FINDING',
        id=f'finding-note-{identifier}',
        reported_by=reporter_model,
        related_url=[related_url_model],
        shared=True,
        finding=finding_type_model,
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_update_note_kpi(self):
    """Update the KPI-kind note in place and expect HTTP 200."""
    # Reporter model
    reporter_model = dict(id=testString, title=testString, url=testString)
    # KpiType model
    kpi_type_model = dict(aggregation_type='SUM', severity="HIGH")

    response = self.findings_service.update_note(
        provider_id=provider_id,
        note_id=f'kpi-note-{identifier}',
        short_description=testString,
        long_description=testString,
        kind='KPI',
        id=f'kpi-note-{identifier}',
        reported_by=reporter_model,
        shared=True,
        kpi=kpi_type_model,
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_update_note_card(self):
    """Update the CARD-kind note in place and expect HTTP 200."""
    # Reporter model
    reporter_model = dict(id=testString, title=testString, url=testString)
    # ValueTypeFindingCountValueType model
    value_type_model = dict(
        kind='FINDING_COUNT',
        finding_note_names=[testString],
        text=testString,
    )
    # CardElementTimeSeriesCardElement model wrapping the value type
    card_element_model = dict(
        text=testString,
        default_interval=testString,
        kind='TIME_SERIES',
        default_time_range='1d',
        value_types=[value_type_model],
    )
    # Card model wrapping the element
    card_model = dict(
        section=testString,
        title=testString,
        subtitle=testString,
        finding_note_names=[testString],
        requires_configuration=True,
        badge_text=testString,
        badge_image=testString,
        elements=[card_element_model],
    )

    response = self.findings_service.update_note(
        provider_id=provider_id,
        note_id=f'card-note-{identifier}',
        short_description=testString,
        long_description=testString,
        kind='CARD',
        id=f'card-note-{identifier}',
        reported_by=reporter_model,
        shared=True,
        card=card_model,
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_update_note_section(self):
    """Update the SECTION-kind note in place and expect HTTP 200."""
    # Reporter model
    reporter_model = dict(id=testString, title=testString, url=testString)
    # Section model
    section_model = dict(title=testString, image=testString)

    response = self.findings_service.update_note(
        provider_id=provider_id,
        note_id=f'section-note-{identifier}',
        short_description=testString,
        long_description=testString,
        kind='SECTION',
        id=f'section-note-{identifier}',
        reported_by=reporter_model,
        shared=True,
        section=section_model,
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_create_occurrence_finding(self):
    """Create a FINDING-kind occurrence attached to the finding note."""
    # Context model
    context_model = dict(
        region=testString,
        resource_crn=testString,
        resource_id=testString,
        resource_name=testString,
        resource_type=testString,
        service_crn=testString,
        service_name=testString,
        environment_name=testString,
        component_name=testString,
        toolchain_id=testString,
    )
    # RemediationStep model
    remediation_step_model = dict(title=testString, url=testString)
    # SocketAddress model, shared by both ends of the connection below
    socket_address_model = dict(address=testString, port=38)
    # NetworkConnection model
    network_connection_model = dict(
        direction=testString,
        protocol=testString,
        client=socket_address_model,
        server=socket_address_model,
    )
    # DataTransferred model
    data_transferred_model = dict(
        client_bytes=38,
        server_bytes=38,
        client_packets=38,
        server_packets=38,
    )
    # Finding model aggregating the pieces above
    finding_model = dict(
        severity='LOW',
        certainty='LOW',
        next_steps=[remediation_step_model],
        network_connection=network_connection_model,
        data_transferred=data_transferred_model,
    )

    response = self.findings_service.create_occurrence(
        provider_id=provider_id,
        note_name="{0}/providers/{1}/notes/finding-note-{2}".format(account_id, provider_id, identifier),
        kind='FINDING',
        id=f'finding-occurrence-{identifier}',
        resource_url=testString,
        remediation=testString,
        context=context_model,
        finding=finding_model,
        replace_if_exists=True,
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_create_occurrence_kpi(self):
    """Create a KPI-kind occurrence attached to the kpi note.

    Verifies the service answers HTTP 200 with an occurrence payload.

    The original version also built finding-specific models (remediation
    step, socket addresses, network connection, data transferred, finding)
    that were never passed to create_occurrence for a KPI occurrence; that
    dead code has been removed.
    """
    # Construct a dict representation of a Context model
    context_model = {
        'region': testString,
        'resource_crn': testString,
        'resource_id': testString,
        'resource_name': testString,
        'resource_type': testString,
        'service_crn': testString,
        'service_name': testString,
        'environment_name': testString,
        'component_name': testString,
        'toolchain_id': testString
    }
    # Construct a dict representation of a Kpi model
    kpi_model = {
        'value': 72.5,
        'total': 72.5
    }
    create_occurrence_response = self.findings_service.create_occurrence(
        provider_id=provider_id,
        note_name="{0}/providers/{1}/notes/kpi-note-{2}".format(account_id, provider_id, identifier),
        kind='KPI',
        id=f'kpi-occurrence-{identifier}',
        resource_url=testString,
        remediation=testString,
        context=context_model,
        kpi=kpi_model,
        replace_if_exists=True,
    )
    assert create_occurrence_response.get_status_code() == 200
    api_occurrence = create_occurrence_response.get_result()
    assert api_occurrence is not None
@needscredentials
def test_get_occurrence_note(self):
    """Fetch the note associated with the kpi occurrence; expect 200."""
    response = self.findings_service.get_occurrence_note(
        provider_id=provider_id,
        occurrence_id=f'kpi-occurrence-{identifier}',
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_list_occurrences(self):
    """List the provider's occurrences and expect a 200 with a payload."""
    response = self.findings_service.list_occurrences(
        provider_id=provider_id,
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_list_note_occurrences(self):
    """List occurrences attached to the finding note; expect HTTP 200."""
    response = self.findings_service.list_note_occurrences(
        provider_id=provider_id,
        note_id=f'finding-note-{identifier}',
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_get_occurrence(self):
    """Fetch the finding occurrence by id and expect it back with 200."""
    response = self.findings_service.get_occurrence(
        provider_id=provider_id,
        occurrence_id=f'finding-occurrence-{identifier}',
    )
    assert response.get_status_code() == 200
    api_occurrence = response.get_result()
    assert api_occurrence is not None
@needscredentials
def test_update_occurrence_finding(self):
    """Update the FINDING-kind occurrence in place and expect HTTP 200."""
    # RemediationStep model
    remediation_step_model = dict(title=testString, url=testString)
    # Finding model
    finding_model = dict(
        severity='LOW',
        certainty='LOW',
        next_steps=[remediation_step_model],
    )

    response = self.findings_service.update_occurrence(
        provider_id=provider_id,
        note_name="{0}/providers/{1}/notes/finding-note-{2}".format(account_id, provider_id, identifier),
        kind='FINDING',
        id=f'finding-occurrence-{identifier}',
        occurrence_id=f'finding-occurrence-{identifier}',
        resource_url=testString,
        remediation=testString,
        finding=finding_model,
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_update_occurrence_kpi(self):
    """Update the KPI-kind occurrence in place and expect HTTP 200."""
    # RemediationStep model (note: not referenced by the KPI payload,
    # mirroring the original test's construction order)
    remediation_step_model = dict(title=testString, url=testString)
    # Kpi model
    kpi_model = dict(value=72.5, total=72.5)

    response = self.findings_service.update_occurrence(
        provider_id=provider_id,
        note_name="{0}/providers/{1}/notes/kpi-note-{2}".format(account_id, provider_id, identifier),
        kind='KPI',
        id=f'kpi-occurrence-{identifier}',
        occurrence_id=f'kpi-occurrence-{identifier}',
        resource_url=testString,
        remediation=testString,
        kpi=kpi_model,
    )
    assert response.get_status_code() == 200
    assert response.get_result() is not None
@needscredentials
def test_delete_occurrence(self):
    """Delete the kpi occurrence and expect HTTP 200."""
    response = self.findings_service.delete_occurrence(
        provider_id=provider_id,
        occurrence_id=f'kpi-occurrence-{identifier}',
    )
    assert response.get_status_code() == 200
@needscredentials
def test_delete_note(self):
    """Delete the section note and expect HTTP 200."""
    response = self.findings_service.delete_note(
        provider_id=provider_id,
        note_id=f'section-note-{identifier}',
    )
    assert response.get_status_code() == 200
| 33.047554 | 188 | 0.611479 | 2,493 | 24,323 | 5.677497 | 0.101083 | 0.040978 | 0.040554 | 0.081108 | 0.875512 | 0.851279 | 0.828812 | 0.821958 | 0.785502 | 0.76685 | 0 | 0.007889 | 0.30687 | 24,323 | 735 | 189 | 33.092517 | 0.831663 | 0.122518 | 0 | 0.716878 | 0 | 0.001815 | 0.125805 | 0.042092 | 0 | 0 | 0 | 0 | 0.07804 | 1 | 0.041742 | false | 0 | 0.010889 | 0 | 0.056261 | 0.009074 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
20f91479478f0935a8b4ff51aff20ac5ee397763 | 43 | py | Python | app/handlers/errors/__init__.py | Katel212/MyPersonalKitchenBot | 03de0beeaf2665e8b3ddd1709da3d4edcd422b80 | [
"MIT"
] | null | null | null | app/handlers/errors/__init__.py | Katel212/MyPersonalKitchenBot | 03de0beeaf2665e8b3ddd1709da3d4edcd422b80 | [
"MIT"
] | 5 | 2020-12-22T17:53:05.000Z | 2021-04-07T20:00:47.000Z | app/handlers/errors/__init__.py | Katel212/MyPersonalKitchenBot | 03de0beeaf2665e8b3ddd1709da3d4edcd422b80 | [
"MIT"
] | null | null | null | from .retry_after import retry_after_error
| 21.5 | 42 | 0.883721 | 7 | 43 | 5 | 0.714286 | 0.571429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.093023 | 43 | 1 | 43 | 43 | 0.897436 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
1f39050f0be91cf42ae151e8e296f8bd7769de0c | 2,558 | py | Python | tests/test_create_authorization.py | fulcrumapp/fulcrum-python | cbb85cfc81aa2142719154621ab25ee8304bad30 | [
"Apache-2.0"
] | 20 | 2015-07-29T03:30:26.000Z | 2021-08-18T14:43:15.000Z | tests/test_create_authorization.py | fulcrumapp/fulcrum-python | cbb85cfc81aa2142719154621ab25ee8304bad30 | [
"Apache-2.0"
] | 22 | 2015-03-14T04:34:15.000Z | 2022-01-19T01:51:33.000Z | tests/test_create_authorization.py | fulcrumapp/fulcrum-python | cbb85cfc81aa2142719154621ab25ee8304bad30 | [
"Apache-2.0"
] | 11 | 2016-03-09T22:12:24.000Z | 2021-09-17T16:18:19.000Z | import unittest
import httpretty
from fulcrum import create_authorization
from tests import api_root
class CreateAuthorizationTest(unittest.TestCase):
    """Tests for fulcrum.create_authorization() against a stubbed API.

    Fixes over the previous version:
    - The four identical httpretty stub registrations are deduplicated
      into _register_authorizations().
    - test_bad_timeout/test_bad_user_id previously used a bare try/except
      and therefore passed silently when create_authorization() raised
      nothing; assertRaises now fails the test if no ValueError occurs.
    """

    # Canned token returned by the stubbed API.
    _TOKEN = 'ab6c6266b3ef7bd204def18f8d54c837e84421acd7744d32d84966e0c260830b57de50be21ffd9b5'
    # Response body without a user_id field.
    _BODY = ('{"authorization": {"id": "abc-123", "token": "' + _TOKEN
             + '", "token_last_8": "21ffd9b5", "note": "a note"}}')
    # Response body including a user_id field.
    _BODY_WITH_USER = ('{"authorization": {"id": "abc-123", "token": "' + _TOKEN
                       + '", "token_last_8": "21ffd9b5", "note": "a note", "user_id": "987-zxy"}}')

    @staticmethod
    def _register_authorizations(body):
        """Stub POST /authorizations to answer *body* with HTTP 200."""
        httpretty.register_uri(httpretty.POST, api_root + '/authorizations',
                               body=body,
                               status=200)

    @httpretty.activate
    def test_valid(self):
        """A valid call returns the parsed authorization dict."""
        self._register_authorizations(self._BODY)
        api_resp = create_authorization('dude@email.com', 'bad_password', 'def-456', 'a note')
        self.assertIsInstance(api_resp, dict)
        self.assertTrue(api_resp['authorization']['id'] == 'abc-123')

    @httpretty.activate
    def test_valid_with_timeout_user_id(self):
        """Passing timeout and user_id yields a user_id in the response."""
        self._register_authorizations(self._BODY_WITH_USER)
        api_resp = create_authorization('dude@email.com', 'bad_password', 'def-456', 'a note', 3600, '987-zxy')
        self.assertIsInstance(api_resp, dict)
        self.assertTrue(api_resp['authorization']['user_id'] == '987-zxy')

    @httpretty.activate
    def test_bad_timeout(self):
        """A non-integer timeout must raise ValueError."""
        self._register_authorizations(self._BODY)
        with self.assertRaises(ValueError) as ctx:
            create_authorization('dude@email.com', 'bad_password', 'def-456', 'a note', 888.76)
        self.assertEqual(str(ctx.exception), 'timeout must be an integer.')

    @httpretty.activate
    def test_bad_user_id(self):
        """A non-string user_id must raise ValueError."""
        self._register_authorizations(self._BODY)
        with self.assertRaises(ValueError) as ctx:
            create_authorization('dude@email.com', 'bad_password', 'def-456', 'a note', 888, 7)
        self.assertEqual(str(ctx.exception), 'user_id must be a string.')
| 53.291667 | 217 | 0.679828 | 268 | 2,558 | 6.317164 | 0.246269 | 0.023627 | 0.05316 | 0.06202 | 0.861193 | 0.795038 | 0.795038 | 0.795038 | 0.795038 | 0.795038 | 0 | 0.131968 | 0.179437 | 2,558 | 47 | 218 | 54.425532 | 0.674607 | 0 | 0 | 0.560976 | 0 | 0.097561 | 0.408913 | 0.129789 | 0 | 0 | 0 | 0 | 0.195122 | 1 | 0.097561 | false | 0.097561 | 0.097561 | 0 | 0.219512 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
2f5d4d31d5a80108d2e0081bc7145a0f16178a62 | 19,936 | py | Python | lttng_ivc/tests/relayd_vs_consumerd/test_relayd_vs_consumerd.py | PSRCode/lttng-ivc | 4d0ded8f0f29b6c09c85a409b2ec53798a3b4964 | [
"MIT"
] | 2 | 2018-05-03T19:11:36.000Z | 2020-04-06T18:26:17.000Z | lttng_ivc/tests/relayd_vs_consumerd/test_relayd_vs_consumerd.py | PSRCode/lttng-ivc | 4d0ded8f0f29b6c09c85a409b2ec53798a3b4964 | [
"MIT"
] | 1 | 2019-02-22T15:21:09.000Z | 2019-02-22T15:21:09.000Z | lttng_ivc/tests/relayd_vs_consumerd/test_relayd_vs_consumerd.py | PSRCode/lttng-ivc | 4d0ded8f0f29b6c09c85a409b2ec53798a3b4964 | [
"MIT"
] | 1 | 2019-02-21T21:44:20.000Z | 2019-02-21T21:44:20.000Z | # Copyright (c) 2017 Jonathan Rajotte-Julien <jonathan.rajotte-julien@efficios.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import pytest
import os
import shutil
import signal
import subprocess
import time
import socket
import lttng_ivc.utils.ProjectFactory as ProjectFactory
import lttng_ivc.utils.utils as utils
import lttng_ivc.utils.runtime as Run
import lttng_ivc.settings as Settings
"""
TODO: Add Command header section
"""
"""
TODO: snapshot, file rotation, rotate
"""
"""
First member: relayd via lttng-tools
Second member: consumerd via lttng-tools
"""
# (relayd version, consumerd version, expected-to-work) triples for basic
# streaming: every pairing of lttng-tools 2.7 through 2.13 is expected to
# interoperate, so the full cartesian product is generated with True.
test_matrix_streaming_base = [
    (
        "lttng-tools-2.{}".format(relayd_minor),
        "lttng-tools-2.{}".format(consumerd_minor),
        True,
    )
    for relayd_minor in range(7, 14)
    for consumerd_minor in range(7, 14)
]
# (relayd version, consumerd version, command, expected support) tuples for
# the metadata-regeneration scenario. The literal table is fully regular, so
# it is generated:
#   - lttng-tools 2.7/2.8 spell the command "metadata regenerate"; it was
#     renamed "regenerate metadata" from 2.9 on (command follows consumerd).
#   - a 2.7 consumerd does not support the feature at all
#     ("Unsupported by tools"); otherwise a 2.7 relayd cannot serve it
#     ("Unsupported by relayd"); every other pairing is "Supported".
test_matrix_streaming_regenerate_metadata = [
    (
        "lttng-tools-2.{}".format(relayd_minor),
        "lttng-tools-2.{}".format(consumerd_minor),
        "metadata regenerate" if consumerd_minor in (7, 8) else "regenerate metadata",
        "Unsupported by tools" if consumerd_minor == 7
        else "Unsupported by relayd" if relayd_minor == 7
        else "Supported",
    )
    for relayd_minor in range(7, 14)
    for consumerd_minor in range(7, 14)
]
# (relayd version, consumerd version, expected-to-work) triples for live
# tracing: like the streaming base matrix, every pairing of lttng-tools 2.7
# through 2.13 is expected to work, so the cartesian product is generated.
test_matrix_live_base = [
    (
        "lttng-tools-2.{}".format(relayd_minor),
        "lttng-tools-2.{}".format(consumerd_minor),
        True,
    )
    for relayd_minor in range(7, 14)
    for consumerd_minor in range(7, 14)
]
# Expand each static compatibility matrix into the runtime parametrization
# consumed by @pytest.mark.parametrize below. The trailing [0, 1] presumably
# selects which tuple positions hold the project labels (0: relayd,
# 1: consumerd) — confirm against lttng_ivc.settings.
runtime_matrix_streaming_base = Settings.generate_runtime_test_matrix(
    test_matrix_streaming_base, [0, 1]
)
runtime_matrix_streaming_regenerate_metadata = Settings.generate_runtime_test_matrix(
    test_matrix_streaming_regenerate_metadata, [0, 1]
)
runtime_matrix_live_base = Settings.generate_runtime_test_matrix(
    test_matrix_live_base, [0, 1]
)
@pytest.mark.parametrize(
    "relayd_label,consumerd_label,scenario", runtime_matrix_streaming_base
)
def test_relayd_vs_consumerd_streaming_base(
    tmpdir, relayd_label, consumerd_label, scenario
):
    """Stream 100 UST events through a relayd/consumerd pair of possibly
    different lttng-tools versions and check the event count in the trace."""
    nb_loop = 100
    nb_expected_events = 100

    # Precooked projects for each side, plus babeltrace to read the trace.
    relayd_project = ProjectFactory.get_precook(relayd_label)
    consumerd_project = ProjectFactory.get_precook(consumerd_label)
    babeltrace_project = ProjectFactory.get_precook(Settings.default_babeltrace)

    base_path = str(tmpdir)
    relayd_runtime_path = os.path.join(base_path, "relayd")
    consumerd_runtime_path = os.path.join(base_path, "consumerd")
    app_path = os.path.join(base_path, "app")

    with Run.get_runtime(relayd_runtime_path) as runtime_relayd, Run.get_runtime(
        consumerd_runtime_path
    ) as runtime_consumerd:
        runtime_relayd.add_project(relayd_project)
        runtime_relayd.add_project(babeltrace_project)
        runtime_consumerd.add_project(consumerd_project)

        # Build the event-generator application inside the consumerd runtime.
        shutil.copytree(Settings.apps_gen_events_folder, app_path)
        runtime_consumerd.run("make V=1", cwd=app_path)

        # Spawn the relay daemon first, then the session daemon that will
        # stream to it over the returned control/data ports.
        relayd_proc, ctrl_port, data_port, live_port = utils.relayd_spawn(
            runtime_relayd
        )
        sessiond_proc = utils.sessiond_spawn(runtime_consumerd)
        url = "net://localhost:{}:{}".format(ctrl_port, data_port)

        # Create a streaming session, enable the test tracepoint and trace
        # one run of the application.
        runtime_consumerd.run("lttng create --set-url={} trace ".format(url))
        runtime_consumerd.run("lttng enable-event -u tp:tptest")
        runtime_consumerd.run("lttng start")
        runtime_consumerd.run("./app {}".format(nb_loop), cwd=app_path)
        runtime_consumerd.run("lttng stop")
        runtime_consumerd.run("lttng destroy -a")

        runtime_consumerd.subprocess_terminate(sessiond_proc)
        # TODO check for error.
        runtime_relayd.subprocess_terminate(relayd_proc)

        # babeltrace prints one line per event; count lines on the relayd
        # side where the streamed trace landed.
        _, trace_text, _ = runtime_relayd.run(
            "babeltrace {}".format(runtime_relayd.lttng_home)
        )
        assert utils.line_count(trace_text) == nb_expected_events
@pytest.mark.parametrize(
    "relayd_label,consumerd_label,command, scenario",
    runtime_matrix_streaming_regenerate_metadata,
)
def test_relayd_vs_consumerd_streaming_regenerate_metadata(
    tmpdir, relayd_label, consumerd_label, command, scenario
):
    """Cross-version check of metadata regeneration over a network session.

    The relayd-side metadata file is truncated mid-session, which must make
    babeltrace fail; ``command`` (an lttng sub-command supplied by the test
    matrix, presumably ``regenerate metadata`` — confirm against the matrix
    definition) is then expected to either repair the trace or, for
    "Unsupported ..." scenarios, fail outright.
    """
    nb_loop = 100
    nb_expected_events = 100
    # Prepare environment
    relayd = ProjectFactory.get_precook(relayd_label)
    consumerd = ProjectFactory.get_precook(consumerd_label)
    babeltrace = ProjectFactory.get_precook(Settings.default_babeltrace)
    relayd_runtime_path = os.path.join(str(tmpdir), "relayd")
    consumerd_runtime_path = os.path.join(str(tmpdir), "consumerd")
    app_path = os.path.join(str(tmpdir), "app")
    with Run.get_runtime(relayd_runtime_path) as runtime_relayd, Run.get_runtime(
        consumerd_runtime_path
    ) as runtime_consumerd:
        runtime_relayd.add_project(relayd)
        runtime_relayd.add_project(babeltrace)
        runtime_consumerd.add_project(consumerd)
        babeltrace_cmd = "babeltrace {}".format(runtime_relayd.lttng_home)
        # Make application using the ust runtime
        shutil.copytree(Settings.apps_gen_events_folder, app_path)
        runtime_consumerd.run("make V=1", cwd=app_path)
        # Start lttng-relayd
        relayd, ctrl_port, data_port, live_port = utils.relayd_spawn(runtime_relayd)
        sessiond = utils.sessiond_spawn(runtime_consumerd)
        url = "net://localhost:{}:{}".format(ctrl_port, data_port)
        # Create session using mi to get path and session name
        runtime_consumerd.run("lttng create --set-url={} trace ".format(url))
        runtime_consumerd.run("lttng enable-event -u tp:tptest")
        runtime_consumerd.run("lttng start")
        # Run application
        cmd = "./app {}".format(nb_loop)
        runtime_consumerd.run(cmd, cwd=app_path)
        # Stop tracing
        runtime_consumerd.run("lttng stop")
        # Empty the metadata file
        metadata = utils.find_file(runtime_relayd.lttng_home, "metadata")
        open(metadata, "w").close()
        # Babeltrace should never be able to parse the trace
        with pytest.raises(subprocess.CalledProcessError):
            runtime_relayd.run(babeltrace_cmd)
        # Resume tracing before issuing the regeneration command.
        runtime_consumerd.run("lttng start")
        # TODO: rework this a bit to differentiate each errors and rework how
        # the condition are meet
        if scenario in ("Unsupported by tools", "Unsupported by relayd"):
            # Unsupported combination: the command itself must fail, and both
            # daemons must still shut down cleanly.
            with pytest.raises(subprocess.CalledProcessError):
                runtime_consumerd.run("lttng {}".format(command))
            # Make sure everything looks good on this side
            sessiond = runtime_consumerd.subprocess_terminate(sessiond)
            if sessiond.returncode != 0:
                pytest.fail("Return value of sessiond is not zero")
            relayd = runtime_relayd.subprocess_terminate(relayd)
            if relayd.returncode != 0:
                pytest.fail("Return value of relayd is not zero")
            return
        # Supported combination: run the command, tear down, then verify the
        # regenerated trace is readable and complete.
        runtime_consumerd.run("lttng {}".format(command))
        runtime_consumerd.run("lttng stop")
        runtime_consumerd.run("lttng destroy -a")
        # Make sure everything looks good
        sessiond = runtime_consumerd.subprocess_terminate(sessiond)
        if sessiond.returncode != 0:
            pytest.fail("Return value of sessiond is not zero")
        relayd = runtime_relayd.subprocess_terminate(relayd)
        if relayd.returncode != 0:
            pytest.fail("Return value of relayd is not zero")
        # Read trace with babeltrace and check for event count via number of line
        cp_process, cp_out, cp_err = runtime_relayd.run(babeltrace_cmd)
        assert utils.line_count(cp_out) == nb_expected_events
@pytest.mark.parametrize(
    "relayd_label,consumerd_label,scenario", runtime_matrix_live_base
)
def test_relayd_vs_consumerd_live_base(tmpdir, relayd_label, consumerd_label, scenario):
    """Live-streaming (``lttng create --live``) cross-version base test.

    Fix: parametrize over ``runtime_matrix_live_base``. This test previously
    reused ``runtime_matrix_streaming_base``, leaving the live matrix
    generated above completely unused — an apparent copy-paste slip.
    """
    nb_loop = 100
    nb_expected_events = 100
    # Prepare environment
    relayd = ProjectFactory.get_precook(relayd_label)
    consumerd = ProjectFactory.get_precook(consumerd_label)
    babeltrace = ProjectFactory.get_precook(Settings.default_babeltrace)
    relayd_runtime_path = os.path.join(str(tmpdir), "relayd")
    consumerd_runtime_path = os.path.join(str(tmpdir), "consumerd")
    app_path = os.path.join(str(tmpdir), "app")
    with Run.get_runtime(relayd_runtime_path) as runtime_relayd, Run.get_runtime(
        consumerd_runtime_path
    ) as runtime_consumerd:
        runtime_relayd.add_project(relayd)
        runtime_relayd.add_project(babeltrace)
        runtime_consumerd.add_project(consumerd)
        # Make application using the ust runtime
        shutil.copytree(Settings.apps_gen_events_folder, app_path)
        runtime_consumerd.run("make V=1", cwd=app_path)
        # Start lttng-relayd
        relayd, ctrl_port, data_port, live_port = utils.relayd_spawn(runtime_relayd)
        sessiond = utils.sessiond_spawn(runtime_consumerd)
        url = "net://localhost:{}:{}".format(ctrl_port, data_port)
        # Create session using mi to get path and session name
        runtime_consumerd.run("lttng create --live --set-url={} trace ".format(url))
        runtime_consumerd.run("lttng enable-event -u tp:tptest")
        runtime_consumerd.run("lttng start")
        # Run application
        cmd = "./app {}".format(nb_loop)
        runtime_consumerd.run(cmd, cwd=app_path)
        # Stop tracing
        runtime_consumerd.run("lttng stop")
        runtime_consumerd.run("lttng destroy -a")
        runtime_consumerd.subprocess_terminate(sessiond)
        # TODO check for error.
        runtime_relayd.subprocess_terminate(relayd)
        # Read trace with babeltrace and check for event count via number of line
        cmd = "babeltrace {}".format(runtime_relayd.lttng_home)
        cp_process, cp_out, cp_err = runtime_relayd.run(cmd)
        assert utils.line_count(cp_out) == nb_expected_events
| 40.520325 | 88 | 0.647773 | 2,755 | 19,936 | 4.581851 | 0.09147 | 0.234493 | 0.256199 | 0.114077 | 0.869841 | 0.84956 | 0.827695 | 0.822308 | 0.801711 | 0.798938 | 0 | 0.048683 | 0.184992 | 19,936 | 491 | 89 | 40.602851 | 0.728213 | 0.104685 | 0 | 0.747967 | 0 | 0 | 0.394611 | 0.00987 | 0 | 0 | 0 | 0.00611 | 0.00813 | 1 | 0.00813 | false | 0 | 0.02981 | 0 | 0.04065 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
85d7c2a2b23c07f568c8392811b55861b571cf6c | 8,695 | py | Python | cs3/ocm/provider/v1beta1/provider_api_pb2_grpc.py | cs3org/python-cs3apis | 33f84befa7c6009ce87fb7594128d26ff6e49bbd | [
"Apache-2.0"
] | 1 | 2020-12-17T14:39:57.000Z | 2020-12-17T14:39:57.000Z | cs3/ocm/provider/v1beta1/provider_api_pb2_grpc.py | cs3org/python-cs3apis | 33f84befa7c6009ce87fb7594128d26ff6e49bbd | [
"Apache-2.0"
] | 1 | 2020-05-06T10:23:07.000Z | 2020-05-12T09:07:08.000Z | cs3/ocm/provider/v1beta1/provider_api_pb2_grpc.py | cs3org/python-cs3apis | 33f84befa7c6009ce87fb7594128d26ff6e49bbd | [
"Apache-2.0"
] | 1 | 2020-05-05T09:24:54.000Z | 2020-05-05T09:24:54.000Z | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from cs3.ocm.provider.v1beta1 import provider_api_pb2 as cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2
# Machine-generated by the gRPC Python protocol compiler plugin ("DO NOT
# EDIT" header at the top of the file); kept in sync with the .proto source.
class ProviderAPIStub(object):
    """OCM Auth Provider API

    The Auth Provider API is meant to authenticate a sync'n'share provider regsistered in the mesh.

    The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL
    NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and
    "OPTIONAL" in this document are to be interpreted as described in
    RFC 2119.

    The following are global requirements that apply to all methods:

    Any method MUST return CODE_OK on a succesful operation.
    Any method MAY return NOT_IMPLEMENTED.
    Any method MAY return INTERNAL.
    Any method MAY return UNKNOWN.
    Any method MAY return UNAUTHENTICATED.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Each attribute is a unary-unary callable bound to one RPC of the
        # cs3.ocm.provider.v1beta1.ProviderAPI service, with protobuf
        # (de)serialization wired in from the generated _pb2 module.
        self.IsProviderAllowed = channel.unary_unary(
            '/cs3.ocm.provider.v1beta1.ProviderAPI/IsProviderAllowed',
            request_serializer=cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.IsProviderAllowedRequest.SerializeToString,
            response_deserializer=cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.IsProviderAllowedResponse.FromString,
        )
        self.GetInfoByDomain = channel.unary_unary(
            '/cs3.ocm.provider.v1beta1.ProviderAPI/GetInfoByDomain',
            request_serializer=cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.GetInfoByDomainRequest.SerializeToString,
            response_deserializer=cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.GetInfoByDomainResponse.FromString,
        )
        self.ListAllProviders = channel.unary_unary(
            '/cs3.ocm.provider.v1beta1.ProviderAPI/ListAllProviders',
            request_serializer=cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.ListAllProvidersRequest.SerializeToString,
            response_deserializer=cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.ListAllProvidersResponse.FromString,
        )
# Machine-generated server-side base class: subclass and override the RPC
# methods, then register with add_ProviderAPIServicer_to_server().
class ProviderAPIServicer(object):
    """OCM Auth Provider API

    The Auth Provider API is meant to authenticate a sync'n'share provider regsistered in the mesh.

    The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL
    NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and
    "OPTIONAL" in this document are to be interpreted as described in
    RFC 2119.

    The following are global requirements that apply to all methods:

    Any method MUST return CODE_OK on a succesful operation.
    Any method MAY return NOT_IMPLEMENTED.
    Any method MAY return UNKNOWN.
    Any method MAY return INTERNAL.
    Any method MAY return UNAUTHENTICATED.
    """

    def IsProviderAllowed(self, request, context):
        """Check if a given system provider is registered in the mesh or not.
        MUST return CODE_UNAUTHENTICATED if the system is not registered
        """
        # Default stub behavior: report UNIMPLEMENTED until overridden.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetInfoByDomain(self, request, context):
        """Get the information of the provider identified by a specific domain.
        MUST return CODE_NOT_FOUND if the sync'n'share system provider does not exist.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ListAllProviders(self, request, context):
        """Get the information of all the providers registered in the mesh.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_ProviderAPIServicer_to_server(servicer, server):
    """Register `servicer`'s RPC handlers on the given grpc `server`.

    Generated helper: builds one unary-unary handler per ProviderAPI RPC and
    installs them under the fully-qualified service name
    'cs3.ocm.provider.v1beta1.ProviderAPI'.
    """
    rpc_method_handlers = {
        'IsProviderAllowed': grpc.unary_unary_rpc_method_handler(
            servicer.IsProviderAllowed,
            request_deserializer=cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.IsProviderAllowedRequest.FromString,
            response_serializer=cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.IsProviderAllowedResponse.SerializeToString,
        ),
        'GetInfoByDomain': grpc.unary_unary_rpc_method_handler(
            servicer.GetInfoByDomain,
            request_deserializer=cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.GetInfoByDomainRequest.FromString,
            response_serializer=cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.GetInfoByDomainResponse.SerializeToString,
        ),
        'ListAllProviders': grpc.unary_unary_rpc_method_handler(
            servicer.ListAllProviders,
            request_deserializer=cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.ListAllProvidersRequest.FromString,
            response_serializer=cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.ListAllProvidersResponse.SerializeToString,
        ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'cs3.ocm.provider.v1beta1.ProviderAPI', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
# Machine-generated connectionless client: each static method is a one-shot
# call through gRPC's *experimental* API (no persistent stub/channel needed).
class ProviderAPI(object):
    """OCM Auth Provider API

    The Auth Provider API is meant to authenticate a sync'n'share provider regsistered in the mesh.

    The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL
    NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and
    "OPTIONAL" in this document are to be interpreted as described in
    RFC 2119.

    The following are global requirements that apply to all methods:

    Any method MUST return CODE_OK on a succesful operation.
    Any method MAY return NOT_IMPLEMENTED.
    Any method MAY return INTERNAL.
    Any method MAY return UNKNOWN.
    Any method MAY return UNAUTHENTICATED.
    """

    @staticmethod
    def IsProviderAllowed(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # One-shot unary-unary invocation of the IsProviderAllowed RPC.
        return grpc.experimental.unary_unary(request, target, '/cs3.ocm.provider.v1beta1.ProviderAPI/IsProviderAllowed',
            cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.IsProviderAllowedRequest.SerializeToString,
            cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.IsProviderAllowedResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetInfoByDomain(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # One-shot unary-unary invocation of the GetInfoByDomain RPC.
        return grpc.experimental.unary_unary(request, target, '/cs3.ocm.provider.v1beta1.ProviderAPI/GetInfoByDomain',
            cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.GetInfoByDomainRequest.SerializeToString,
            cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.GetInfoByDomainResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListAllProviders(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # One-shot unary-unary invocation of the ListAllProviders RPC.
        return grpc.experimental.unary_unary(request, target, '/cs3.ocm.provider.v1beta1.ProviderAPI/ListAllProviders',
            cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.ListAllProvidersRequest.SerializeToString,
            cs3_dot_ocm_dot_provider_dot_v1beta1_dot_provider__api__pb2.ListAllProvidersResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 47.513661 | 144 | 0.714549 | 966 | 8,695 | 6.102484 | 0.15528 | 0.070908 | 0.047498 | 0.038677 | 0.820356 | 0.814928 | 0.798643 | 0.766751 | 0.741815 | 0.741815 | 0 | 0.016694 | 0.221507 | 8,695 | 182 | 145 | 47.774725 | 0.854188 | 0.266935 | 0 | 0.470588 | 1 | 0 | 0.089128 | 0.058766 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078431 | false | 0 | 0.019608 | 0.029412 | 0.156863 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c813b9f28af8871a81118ca6f3fd8be5ebfcfab1 | 571 | py | Python | AI 이노베이션 스퀘어 언어지능 과정/20190430/Day02 HTTP, URLlib, Request04.py | donddog/AI_Innovation_Square_Codes | a04d50db011d25e00d8486146c24124c50242aa7 | [
"MIT"
] | 1 | 2021-02-11T16:45:21.000Z | 2021-02-11T16:45:21.000Z | AI 이노베이션 스퀘어 언어지능 과정/20190430/Day02 HTTP, URLlib, Request04.py | donddog/AI_Innovation_Square_Codes | a04d50db011d25e00d8486146c24124c50242aa7 | [
"MIT"
] | null | null | null | AI 이노베이션 스퀘어 언어지능 과정/20190430/Day02 HTTP, URLlib, Request04.py | donddog/AI_Innovation_Square_Codes | a04d50db011d25e00d8486146c24124c50242aa7 | [
"MIT"
] | null | null | null | <<<<<<< HEAD
from urllib import parse
result = parse.urlparse("https://www.google.com/search/q=%EB%B0%95%EB%B3%B4%EC%98%81'")
print([_ for _ in result])
#parse.urlenconde("박보영")
print(parse.quote("박보영"))
print(parse.quote_plus("박보영"))
=======
from urllib import parse
result = parse.urlparse("https://www.google.com/search/q=%EB%B0%95%EB%B3%B4%EC%98%81'")
print([_ for _ in result])
#parse.urlenconde("박보영")
print(parse.quote("박보영"))
print(parse.quote_plus("박보영"))
>>>>>>> 125e15a4c5fcf711dd279c9b18e149867466699e
print(parse.unquote('q=%EB%B0%95%EB%B3%B4%EC%98%81')) | 28.55 | 87 | 0.697023 | 91 | 571 | 4.307692 | 0.32967 | 0.127551 | 0.132653 | 0.183673 | 0.844388 | 0.844388 | 0.844388 | 0.844388 | 0.844388 | 0.844388 | 0 | 0.103383 | 0.068301 | 571 | 20 | 88 | 28.55 | 0.633459 | 0.08056 | 0 | 0.714286 | 0 | 0.142857 | 0.307252 | 0.055344 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.142857 | null | null | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 10 |
c857afeeed539f96e55bfacd4de2e08d559152e2 | 93 | py | Python | sodium/gradcam/__init__.py | Keerthi001/PySodium | 761598d8a129ce95a42404898b7f16ddcae568d9 | [
"MIT"
] | 3 | 2020-04-04T20:22:15.000Z | 2021-02-11T13:13:14.000Z | sodium/gradcam/__init__.py | Keerthi001/PySodium | 761598d8a129ce95a42404898b7f16ddcae568d9 | [
"MIT"
] | 1 | 2020-07-01T14:14:50.000Z | 2020-07-01T16:04:13.000Z | sodium/gradcam/__init__.py | Keerthi001/PySodium | 761598d8a129ce95a42404898b7f16ddcae568d9 | [
"MIT"
] | null | null | null | from .core import GradCAM
from .gradcam import get_gradcam
from .gradcam import plot_gradcam
| 23.25 | 33 | 0.83871 | 14 | 93 | 5.428571 | 0.428571 | 0.289474 | 0.473684 | 0.631579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129032 | 93 | 3 | 34 | 31 | 0.938272 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
c074f5b39efbbc78c6e07acde14030435aa00d35 | 2,465 | py | Python | tests/core/snapshotting_registerfile_test.py | cornell-brg/lizard | 7f9a78a913e64b5cfdee3a26223539ad225bd6da | [
"BSD-3-Clause"
] | 50 | 2019-05-22T08:43:15.000Z | 2022-03-21T23:58:50.000Z | tests/core/snapshotting_registerfile_test.py | cornell-brg/lizard | 7f9a78a913e64b5cfdee3a26223539ad225bd6da | [
"BSD-3-Clause"
] | 1 | 2019-07-27T18:51:52.000Z | 2019-08-02T01:20:22.000Z | tests/core/snapshotting_registerfile_test.py | cornell-brg/lizard | 7f9a78a913e64b5cfdee3a26223539ad225bd6da | [
"BSD-3-Clause"
] | 11 | 2019-12-26T06:00:48.000Z | 2022-03-27T02:29:35.000Z | from pymtl import *
from tests.context import lizard
from lizard.util.test_utils import run_test_vector_sim
from lizard.util.rtl.snapshotting_registerfile import SnapshottingRegisterFile
from tests.config import test_verilog
def test_basic():
    """Snapshot/restore round-trip on a 2-read-port register file.

    Each vector row drives one simulation step with the signals named in the
    header row (read port 0 address and expected data, write port 0, then
    snapshot/restore call+id). Values 8 and 3 are written, a snapshot is
    taken, the registers are overwritten with 7 and 4, and restoring the
    snapshot must bring 8 and 3 back.
    NOTE(review): constructor args (64, 32, 2, 1, False, False, 1) are
    positional — presumably width/entries/read-ports/write-ports/bypass
    flags/snapshot count; confirm against SnapshottingRegisterFile.
    """
    run_test_vector_sim(
        SnapshottingRegisterFile(64, 32, 2, 1, False, False, 1),
        [
            ('read_addr[0] read_data[0]* write_addr[0] write_data[0] write_call[0] snapshot_call snapshot_target_id restore_call restore_source_id'
             ),
            (0, 0, 0, 8, 1, 0, 0, 0, 0),
            (0, 8, 2, 3, 1, 0, 0, 0, 0),
            (2, 3, 0, 0, 0, 0, 0, 0, 0),
            (2, 3, 0, 0, 0, 1, 0, 0, 0),  # save a snapshot into slot 0
            (0, 8, 0, 7, 1, 0, 0, 0, 0),
            (0, 7, 2, 4, 1, 0, 0, 0, 0),
            (2, 4, 0, 0, 0, 0, 0, 0, 0),
            (2, 4, 0, 0, 0, 0, 0, 1, 0),  # restore the snapshot
            (0, 8, 2, 3, 1, 0, 0, 0, 0),
            (2, 3, 0, 0, 0, 0, 0, 0, 0),
        ],
        dump_vcd=None,
        test_verilog=test_verilog)
def test_snapshot_write():
    """Snapshot taken in the same cycle as a write, write-bypass disabled.

    Per the inline comments, the snapshot captures state *before* the
    concurrent write, so restoring yields the pre-write value (4 is read
    while restoring, then 3 — the snapshotted value — afterwards).
    """
    run_test_vector_sim(
        SnapshottingRegisterFile(8, 4, 1, 1, False, False, 1),
        [
            ('read_addr[0] read_data[0]* write_addr[0] write_data[0] write_call[0] snapshot_call snapshot_target_id restore_call restore_source_id'
             ),
            (0, 0, 0, 8, 1, 0, 0, 0, 0),
            (0, 8, 2, 3, 1, 0, 0, 0, 0),
            (0, 8, 2, 4, 1, 1, 0, 0,
             0),  # save a snapshot into slot 0 (occurs before write)
            (0, 8, 0, 7, 1, 0, 0, 0, 0),
            (2, 4, 0, 0, 0, 0, 0, 1,
             0),  # restore the snapshot (read old value while restoring)
            (2, 3, 0, 0, 0, 0, 0, 0, 0),
            (0, 8, 0, 0, 0, 0, 0, 0, 0),
        ],
        dump_vcd=None,
        test_verilog=test_verilog)
def test_snapshot_write_bypassed():
    """Snapshot taken in the same cycle as a write, write-bypass enabled.

    Differs from test_snapshot_write only in the bypass constructor flag
    (True): the snapshot now captures state *after* the concurrent write,
    so restoring yields 4 (the bypassed write) instead of 3.
    """
    run_test_vector_sim(
        SnapshottingRegisterFile(8, 4, 1, 1, False, True, 1),
        [
            ('read_addr[0] read_data[0]* write_addr[0] write_data[0] write_call[0] snapshot_call snapshot_target_id restore_call restore_source_id'
             ),
            (0, 0, 0, 8, 1, 0, 0, 0, 0),
            (0, 8, 2, 3, 1, 0, 0, 0, 0),
            (0, 8, 2, 4, 1, 1, 0, 0,
             0),  # save a snapshot into slot 0 (occurs after write)
            (0, 8, 0, 7, 1, 0, 0, 0, 0),
            (2, 4, 0, 0, 0, 0, 0, 1, 0),  # restore the snapshot
            (2, 4, 0, 0, 0, 0, 0, 0, 0),
            (0, 8, 0, 0, 0, 0, 0, 0, 0),
        ],
        dump_vcd=None,
        test_verilog=test_verilog)
| 37.348485 | 145 | 0.51927 | 430 | 2,465 | 2.830233 | 0.132558 | 0.180772 | 0.199671 | 0.174199 | 0.78636 | 0.753492 | 0.752671 | 0.752671 | 0.752671 | 0.73788 | 0 | 0.15012 | 0.321704 | 2,465 | 65 | 146 | 37.923077 | 0.577751 | 0.090061 | 0 | 0.711864 | 0 | 0.050847 | 0.177102 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.050847 | true | 0.016949 | 0.084746 | 0 | 0.135593 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
23da44171bdb2d70442e54525f346b23c94773ab | 82,311 | py | Python | sdk/python/pulumi_oci/dataflow/application.py | EladGabay/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | [
"ECL-2.0",
"Apache-2.0"
] | 5 | 2021-08-17T11:14:46.000Z | 2021-12-31T02:07:03.000Z | sdk/python/pulumi_oci/dataflow/application.py | pulumi-oci/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2021-09-06T11:21:29.000Z | 2021-09-06T11:21:29.000Z | sdk/python/pulumi_oci/dataflow/application.py | pulumi-oci/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2021-08-24T23:31:30.000Z | 2022-01-02T19:26:54.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ApplicationArgs', 'Application']
@pulumi.input_type
class ApplicationArgs:
def __init__(__self__, *,
compartment_id: pulumi.Input[str],
display_name: pulumi.Input[str],
driver_shape: pulumi.Input[str],
executor_shape: pulumi.Input[str],
file_uri: pulumi.Input[str],
language: pulumi.Input[str],
num_executors: pulumi.Input[int],
spark_version: pulumi.Input[str],
archive_uri: Optional[pulumi.Input[str]] = None,
arguments: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
class_name: Optional[pulumi.Input[str]] = None,
configuration: Optional[pulumi.Input[Mapping[str, Any]]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
description: Optional[pulumi.Input[str]] = None,
execute: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
logs_bucket_uri: Optional[pulumi.Input[str]] = None,
metastore_id: Optional[pulumi.Input[str]] = None,
parameters: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationParameterArgs']]]] = None,
private_endpoint_id: Optional[pulumi.Input[str]] = None,
warehouse_bucket_uri: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Application resource.
:param pulumi.Input[str] compartment_id: (Updatable) The OCID of a compartment.
:param pulumi.Input[str] display_name: (Updatable) A user-friendly name. It does not have to be unique. Avoid entering confidential information.
:param pulumi.Input[str] driver_shape: (Updatable) The VM shape for the driver. Sets the driver cores and memory.
:param pulumi.Input[str] executor_shape: (Updatable) The VM shape for the executors. Sets the executor cores and memory.
:param pulumi.Input[str] file_uri: (Updatable) An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
:param pulumi.Input[str] language: (Updatable) The Spark language.
:param pulumi.Input[int] num_executors: (Updatable) The number of executor VMs requested.
:param pulumi.Input[str] spark_version: (Updatable) The Spark version utilized to run the application.
:param pulumi.Input[str] archive_uri: (Updatable) An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
:param pulumi.Input[Sequence[pulumi.Input[str]]] arguments: (Updatable) The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as `Service Api Spec`, where `name` is the name of the parameter. Example: `[ "--input", "${input_file}", "--name", "John Doe" ]` If "input_file" has a value of "mydata.xml", then the value above will be translated to `--input mydata.xml --name "John Doe"`
:param pulumi.Input[str] class_name: (Updatable) The class for the application.
:param pulumi.Input[Mapping[str, Any]] configuration: (Updatable) The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
:param pulumi.Input[str] description: (Updatable) A user-friendly description. Avoid entering confidential information.
:param pulumi.Input[str] execute: (Updatable) The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include ``--class``, ``--file``, ``--jars``, ``--conf``, ``--py-files``, and main application file with arguments. Example: ``--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10`` Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input[str] logs_bucket_uri: (Updatable) An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
:param pulumi.Input[str] metastore_id: (Updatable) The OCID of Oracle Cloud Infrastructure Hive Metastore.
:param pulumi.Input[Sequence[pulumi.Input['ApplicationParameterArgs']]] parameters: (Updatable) An array of name/value pairs used to fill placeholders found in properties like `Application.arguments`. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
:param pulumi.Input[str] private_endpoint_id: (Updatable) The OCID of a private endpoint.
:param pulumi.Input[str] warehouse_bucket_uri: (Updatable) An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
"""
pulumi.set(__self__, "compartment_id", compartment_id)
pulumi.set(__self__, "display_name", display_name)
pulumi.set(__self__, "driver_shape", driver_shape)
pulumi.set(__self__, "executor_shape", executor_shape)
pulumi.set(__self__, "file_uri", file_uri)
pulumi.set(__self__, "language", language)
pulumi.set(__self__, "num_executors", num_executors)
pulumi.set(__self__, "spark_version", spark_version)
if archive_uri is not None:
pulumi.set(__self__, "archive_uri", archive_uri)
if arguments is not None:
pulumi.set(__self__, "arguments", arguments)
if class_name is not None:
pulumi.set(__self__, "class_name", class_name)
if configuration is not None:
pulumi.set(__self__, "configuration", configuration)
if defined_tags is not None:
pulumi.set(__self__, "defined_tags", defined_tags)
if description is not None:
pulumi.set(__self__, "description", description)
if execute is not None:
pulumi.set(__self__, "execute", execute)
if freeform_tags is not None:
pulumi.set(__self__, "freeform_tags", freeform_tags)
if logs_bucket_uri is not None:
pulumi.set(__self__, "logs_bucket_uri", logs_bucket_uri)
if metastore_id is not None:
pulumi.set(__self__, "metastore_id", metastore_id)
if parameters is not None:
pulumi.set(__self__, "parameters", parameters)
if private_endpoint_id is not None:
pulumi.set(__self__, "private_endpoint_id", private_endpoint_id)
if warehouse_bucket_uri is not None:
pulumi.set(__self__, "warehouse_bucket_uri", warehouse_bucket_uri)
    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> pulumi.Input[str]:
        """
        (Updatable) The OCID of a compartment.
        """
        return pulumi.get(self, "compartment_id")

    # Value is stored through pulumi's input-property machinery, not as a
    # plain attribute.
    @compartment_id.setter
    def compartment_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "compartment_id", value)
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Input[str]:
        """
        (Updatable) A user-friendly name. It does not have to be unique. Avoid entering confidential information.
        """
        return pulumi.get(self, "display_name")

    # Setter mirrors the getter via pulumi's input-property storage.
    @display_name.setter
    def display_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "display_name", value)
    @property
    @pulumi.getter(name="driverShape")
    def driver_shape(self) -> pulumi.Input[str]:
        """
        (Updatable) The VM shape for the driver. Sets the driver cores and memory.
        """
        return pulumi.get(self, "driver_shape")

    # Setter mirrors the getter via pulumi's input-property storage.
    @driver_shape.setter
    def driver_shape(self, value: pulumi.Input[str]):
        pulumi.set(self, "driver_shape", value)
    @property
    @pulumi.getter(name="executorShape")
    def executor_shape(self) -> pulumi.Input[str]:
        """
        (Updatable) The VM shape for the executors. Sets the executor cores and memory.
        """
        return pulumi.get(self, "executor_shape")
    @executor_shape.setter
    def executor_shape(self, value: pulumi.Input[str]):
        """Set the value of ``executor_shape``."""
        pulumi.set(self, "executor_shape", value)
    @property
    @pulumi.getter(name="fileUri")
    def file_uri(self) -> pulumi.Input[str]:
        """
        (Updatable) An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
        """
        return pulumi.get(self, "file_uri")
    @file_uri.setter
    def file_uri(self, value: pulumi.Input[str]):
        """Set the value of ``file_uri``."""
        pulumi.set(self, "file_uri", value)
    @property
    @pulumi.getter
    def language(self) -> pulumi.Input[str]:
        """
        (Updatable) The Spark language.
        """
        return pulumi.get(self, "language")
    @language.setter
    def language(self, value: pulumi.Input[str]):
        """Set the value of ``language``."""
        pulumi.set(self, "language", value)
    @property
    @pulumi.getter(name="numExecutors")
    def num_executors(self) -> pulumi.Input[int]:
        """
        (Updatable) The number of executor VMs requested.
        """
        return pulumi.get(self, "num_executors")
    @num_executors.setter
    def num_executors(self, value: pulumi.Input[int]):
        """Set the value of ``num_executors``."""
        pulumi.set(self, "num_executors", value)
    @property
    @pulumi.getter(name="sparkVersion")
    def spark_version(self) -> pulumi.Input[str]:
        """
        (Updatable) The Spark version utilized to run the application.
        """
        return pulumi.get(self, "spark_version")
    @spark_version.setter
    def spark_version(self, value: pulumi.Input[str]):
        """Set the value of ``spark_version``."""
        pulumi.set(self, "spark_version", value)
    @property
    @pulumi.getter(name="archiveUri")
    def archive_uri(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
        """
        return pulumi.get(self, "archive_uri")
    @archive_uri.setter
    def archive_uri(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``archive_uri``."""
        pulumi.set(self, "archive_uri", value)
    @property
    @pulumi.getter
    def arguments(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        (Updatable) The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with an HTTP 400 status code. Placeholders are specified as `Service Api Spec`, where `name` is the name of the parameter. Example: `[ "--input", "${input_file}", "--name", "John Doe" ]` If "input_file" has a value of "mydata.xml", then the value above will be translated to `--input mydata.xml --name "John Doe"`
        """
        return pulumi.get(self, "arguments")
    @arguments.setter
    def arguments(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        """Set the value of ``arguments``."""
        pulumi.set(self, "arguments", value)
    @property
    @pulumi.getter(name="className")
    def class_name(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The class for the application.
        """
        return pulumi.get(self, "class_name")
    @class_name.setter
    def class_name(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``class_name``."""
        pulumi.set(self, "class_name", value)
    @property
    @pulumi.getter
    def configuration(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
        """
        return pulumi.get(self, "configuration")
    @configuration.setter
    def configuration(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        """Set the value of ``configuration``."""
        pulumi.set(self, "configuration", value)
    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        """
        return pulumi.get(self, "defined_tags")
    @defined_tags.setter
    def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        """Set the value of ``defined_tags``."""
        pulumi.set(self, "defined_tags", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) A user-friendly description. Avoid entering confidential information.
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``description``."""
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter
    def execute(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include ``--class``, ``--file``, ``--jars``, ``--conf``, ``--py-files``, and main application file with arguments. Example: ``--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10`` Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
        """
        return pulumi.get(self, "execute")
    @execute.setter
    def execute(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``execute``."""
        pulumi.set(self, "execute", value)
    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        """
        return pulumi.get(self, "freeform_tags")
    @freeform_tags.setter
    def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        """Set the value of ``freeform_tags``."""
        pulumi.set(self, "freeform_tags", value)
    @property
    @pulumi.getter(name="logsBucketUri")
    def logs_bucket_uri(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
        """
        return pulumi.get(self, "logs_bucket_uri")
    @logs_bucket_uri.setter
    def logs_bucket_uri(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``logs_bucket_uri``."""
        pulumi.set(self, "logs_bucket_uri", value)
    @property
    @pulumi.getter(name="metastoreId")
    def metastore_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The OCID of Oracle Cloud Infrastructure Hive Metastore.
        """
        return pulumi.get(self, "metastore_id")
    @metastore_id.setter
    def metastore_id(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``metastore_id``."""
        pulumi.set(self, "metastore_id", value)
    @property
    @pulumi.getter
    def parameters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationParameterArgs']]]]:
        """
        (Updatable) An array of name/value pairs used to fill placeholders found in properties like `Application.arguments`. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
        """
        return pulumi.get(self, "parameters")
    @parameters.setter
    def parameters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationParameterArgs']]]]):
        """Set the value of ``parameters``."""
        pulumi.set(self, "parameters", value)
    @property
    @pulumi.getter(name="privateEndpointId")
    def private_endpoint_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The OCID of a private endpoint.
        """
        return pulumi.get(self, "private_endpoint_id")
    @private_endpoint_id.setter
    def private_endpoint_id(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``private_endpoint_id``."""
        pulumi.set(self, "private_endpoint_id", value)
    @property
    @pulumi.getter(name="warehouseBucketUri")
    def warehouse_bucket_uri(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
        """
        return pulumi.get(self, "warehouse_bucket_uri")
    @warehouse_bucket_uri.setter
    def warehouse_bucket_uri(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``warehouse_bucket_uri``."""
        pulumi.set(self, "warehouse_bucket_uri", value)
@pulumi.input_type
class _ApplicationState:
    """
    State input type for the ``Application`` resource.

    Every field is optional; only values supplied as non-``None`` are recorded
    on the instance, making this type suitable for looking up and filtering
    existing Application resources.
    """
    def __init__(__self__, *,
                 archive_uri: Optional[pulumi.Input[str]] = None,
                 arguments: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 class_name: Optional[pulumi.Input[str]] = None,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 configuration: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 driver_shape: Optional[pulumi.Input[str]] = None,
                 execute: Optional[pulumi.Input[str]] = None,
                 executor_shape: Optional[pulumi.Input[str]] = None,
                 file_uri: Optional[pulumi.Input[str]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 language: Optional[pulumi.Input[str]] = None,
                 logs_bucket_uri: Optional[pulumi.Input[str]] = None,
                 metastore_id: Optional[pulumi.Input[str]] = None,
                 num_executors: Optional[pulumi.Input[int]] = None,
                 owner_principal_id: Optional[pulumi.Input[str]] = None,
                 owner_user_name: Optional[pulumi.Input[str]] = None,
                 parameters: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationParameterArgs']]]] = None,
                 private_endpoint_id: Optional[pulumi.Input[str]] = None,
                 spark_version: Optional[pulumi.Input[str]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 time_created: Optional[pulumi.Input[str]] = None,
                 time_updated: Optional[pulumi.Input[str]] = None,
                 warehouse_bucket_uri: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Application resources.
        :param pulumi.Input[str] archive_uri: (Updatable) An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] arguments: (Updatable) The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with an HTTP 400 status code. Placeholders are specified as `Service Api Spec`, where `name` is the name of the parameter. Example: `[ "--input", "${input_file}", "--name", "John Doe" ]` If "input_file" has a value of "mydata.xml", then the value above will be translated to `--input mydata.xml --name "John Doe"`
        :param pulumi.Input[str] class_name: (Updatable) The class for the application.
        :param pulumi.Input[str] compartment_id: (Updatable) The OCID of a compartment.
        :param pulumi.Input[Mapping[str, Any]] configuration: (Updatable) The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        :param pulumi.Input[str] description: (Updatable) A user-friendly description. Avoid entering confidential information.
        :param pulumi.Input[str] display_name: (Updatable) A user-friendly name. It does not have to be unique. Avoid entering confidential information.
        :param pulumi.Input[str] driver_shape: (Updatable) The VM shape for the driver. Sets the driver cores and memory.
        :param pulumi.Input[str] execute: (Updatable) The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include ``--class``, ``--file``, ``--jars``, ``--conf``, ``--py-files``, and main application file with arguments. Example: ``--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10`` Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
        :param pulumi.Input[str] executor_shape: (Updatable) The VM shape for the executors. Sets the executor cores and memory.
        :param pulumi.Input[str] file_uri: (Updatable) An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        :param pulumi.Input[str] language: (Updatable) The Spark language.
        :param pulumi.Input[str] logs_bucket_uri: (Updatable) An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
        :param pulumi.Input[str] metastore_id: (Updatable) The OCID of Oracle Cloud Infrastructure Hive Metastore.
        :param pulumi.Input[int] num_executors: (Updatable) The number of executor VMs requested.
        :param pulumi.Input[str] owner_principal_id: The OCID of the user who created the resource.
        :param pulumi.Input[str] owner_user_name: The username of the user who created the resource. If the username of the owner does not exist, `null` will be returned and the caller should refer to the ownerPrincipalId value instead.
        :param pulumi.Input[Sequence[pulumi.Input['ApplicationParameterArgs']]] parameters: (Updatable) An array of name/value pairs used to fill placeholders found in properties like `Application.arguments`. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
        :param pulumi.Input[str] private_endpoint_id: (Updatable) The OCID of a private endpoint.
        :param pulumi.Input[str] spark_version: (Updatable) The Spark version utilized to run the application.
        :param pulumi.Input[str] state: The current state of this application.
        :param pulumi.Input[str] time_created: The date and time an application was created, expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format. Example: `2018-04-03T21:10:29.600Z`
        :param pulumi.Input[str] time_updated: The date and time an application was updated, expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format. Example: `2018-04-03T21:10:29.600Z`
        :param pulumi.Input[str] warehouse_bucket_uri: (Updatable) An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
        """
        # Only record explicitly supplied values; unset (None) fields are left
        # absent so state lookups can filter on just the provided properties.
        if archive_uri is not None:
            pulumi.set(__self__, "archive_uri", archive_uri)
        if arguments is not None:
            pulumi.set(__self__, "arguments", arguments)
        if class_name is not None:
            pulumi.set(__self__, "class_name", class_name)
        if compartment_id is not None:
            pulumi.set(__self__, "compartment_id", compartment_id)
        if configuration is not None:
            pulumi.set(__self__, "configuration", configuration)
        if defined_tags is not None:
            pulumi.set(__self__, "defined_tags", defined_tags)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if driver_shape is not None:
            pulumi.set(__self__, "driver_shape", driver_shape)
        if execute is not None:
            pulumi.set(__self__, "execute", execute)
        if executor_shape is not None:
            pulumi.set(__self__, "executor_shape", executor_shape)
        if file_uri is not None:
            pulumi.set(__self__, "file_uri", file_uri)
        if freeform_tags is not None:
            pulumi.set(__self__, "freeform_tags", freeform_tags)
        if language is not None:
            pulumi.set(__self__, "language", language)
        if logs_bucket_uri is not None:
            pulumi.set(__self__, "logs_bucket_uri", logs_bucket_uri)
        if metastore_id is not None:
            pulumi.set(__self__, "metastore_id", metastore_id)
        if num_executors is not None:
            pulumi.set(__self__, "num_executors", num_executors)
        if owner_principal_id is not None:
            pulumi.set(__self__, "owner_principal_id", owner_principal_id)
        if owner_user_name is not None:
            pulumi.set(__self__, "owner_user_name", owner_user_name)
        if parameters is not None:
            pulumi.set(__self__, "parameters", parameters)
        if private_endpoint_id is not None:
            pulumi.set(__self__, "private_endpoint_id", private_endpoint_id)
        if spark_version is not None:
            pulumi.set(__self__, "spark_version", spark_version)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if time_created is not None:
            pulumi.set(__self__, "time_created", time_created)
        if time_updated is not None:
            pulumi.set(__self__, "time_updated", time_updated)
        if warehouse_bucket_uri is not None:
            pulumi.set(__self__, "warehouse_bucket_uri", warehouse_bucket_uri)
    @property
    @pulumi.getter(name="archiveUri")
    def archive_uri(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
        """
        return pulumi.get(self, "archive_uri")
    @archive_uri.setter
    def archive_uri(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``archive_uri``."""
        pulumi.set(self, "archive_uri", value)
    @property
    @pulumi.getter
    def arguments(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        (Updatable) The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with an HTTP 400 status code. Placeholders are specified as `Service Api Spec`, where `name` is the name of the parameter. Example: `[ "--input", "${input_file}", "--name", "John Doe" ]` If "input_file" has a value of "mydata.xml", then the value above will be translated to `--input mydata.xml --name "John Doe"`
        """
        return pulumi.get(self, "arguments")
    @arguments.setter
    def arguments(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        """Set the value of ``arguments``."""
        pulumi.set(self, "arguments", value)
    @property
    @pulumi.getter(name="className")
    def class_name(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The class for the application.
        """
        return pulumi.get(self, "class_name")
    @class_name.setter
    def class_name(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``class_name``."""
        pulumi.set(self, "class_name", value)
    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The OCID of a compartment.
        """
        return pulumi.get(self, "compartment_id")
    @compartment_id.setter
    def compartment_id(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``compartment_id``."""
        pulumi.set(self, "compartment_id", value)
    @property
    @pulumi.getter
    def configuration(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
        """
        return pulumi.get(self, "configuration")
    @configuration.setter
    def configuration(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        """Set the value of ``configuration``."""
        pulumi.set(self, "configuration", value)
    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        """
        return pulumi.get(self, "defined_tags")
    @defined_tags.setter
    def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        """Set the value of ``defined_tags``."""
        pulumi.set(self, "defined_tags", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) A user-friendly description. Avoid entering confidential information.
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``description``."""
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) A user-friendly name. It does not have to be unique. Avoid entering confidential information.
        """
        return pulumi.get(self, "display_name")
    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``display_name``."""
        pulumi.set(self, "display_name", value)
    @property
    @pulumi.getter(name="driverShape")
    def driver_shape(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The VM shape for the driver. Sets the driver cores and memory.
        """
        return pulumi.get(self, "driver_shape")
    @driver_shape.setter
    def driver_shape(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``driver_shape``."""
        pulumi.set(self, "driver_shape", value)
    @property
    @pulumi.getter
    def execute(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include ``--class``, ``--file``, ``--jars``, ``--conf``, ``--py-files``, and main application file with arguments. Example: ``--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10`` Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
        """
        return pulumi.get(self, "execute")
    @execute.setter
    def execute(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``execute``."""
        pulumi.set(self, "execute", value)
    @property
    @pulumi.getter(name="executorShape")
    def executor_shape(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The VM shape for the executors. Sets the executor cores and memory.
        """
        return pulumi.get(self, "executor_shape")
    @executor_shape.setter
    def executor_shape(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``executor_shape``."""
        pulumi.set(self, "executor_shape", value)
    @property
    @pulumi.getter(name="fileUri")
    def file_uri(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
        """
        return pulumi.get(self, "file_uri")
    @file_uri.setter
    def file_uri(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``file_uri``."""
        pulumi.set(self, "file_uri", value)
    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        """
        return pulumi.get(self, "freeform_tags")
    @freeform_tags.setter
    def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        """Set the value of ``freeform_tags``."""
        pulumi.set(self, "freeform_tags", value)
    @property
    @pulumi.getter
    def language(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The Spark language.
        """
        return pulumi.get(self, "language")
    @language.setter
    def language(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``language``."""
        pulumi.set(self, "language", value)
    @property
    @pulumi.getter(name="logsBucketUri")
    def logs_bucket_uri(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
        """
        return pulumi.get(self, "logs_bucket_uri")
    @logs_bucket_uri.setter
    def logs_bucket_uri(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``logs_bucket_uri``."""
        pulumi.set(self, "logs_bucket_uri", value)
    @property
    @pulumi.getter(name="metastoreId")
    def metastore_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The OCID of Oracle Cloud Infrastructure Hive Metastore.
        """
        return pulumi.get(self, "metastore_id")
    @metastore_id.setter
    def metastore_id(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``metastore_id``."""
        pulumi.set(self, "metastore_id", value)
    @property
    @pulumi.getter(name="numExecutors")
    def num_executors(self) -> Optional[pulumi.Input[int]]:
        """
        (Updatable) The number of executor VMs requested.
        """
        return pulumi.get(self, "num_executors")
    @num_executors.setter
    def num_executors(self, value: Optional[pulumi.Input[int]]):
        """Set the value of ``num_executors``."""
        pulumi.set(self, "num_executors", value)
    @property
    @pulumi.getter(name="ownerPrincipalId")
    def owner_principal_id(self) -> Optional[pulumi.Input[str]]:
        """
        The OCID of the user who created the resource.
        """
        return pulumi.get(self, "owner_principal_id")
    @owner_principal_id.setter
    def owner_principal_id(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``owner_principal_id``."""
        pulumi.set(self, "owner_principal_id", value)
    @property
    @pulumi.getter(name="ownerUserName")
    def owner_user_name(self) -> Optional[pulumi.Input[str]]:
        """
        The username of the user who created the resource. If the username of the owner does not exist, `null` will be returned and the caller should refer to the ownerPrincipalId value instead.
        """
        return pulumi.get(self, "owner_user_name")
    @owner_user_name.setter
    def owner_user_name(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``owner_user_name``."""
        pulumi.set(self, "owner_user_name", value)
    @property
    @pulumi.getter
    def parameters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationParameterArgs']]]]:
        """
        (Updatable) An array of name/value pairs used to fill placeholders found in properties like `Application.arguments`. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
        """
        return pulumi.get(self, "parameters")
    @parameters.setter
    def parameters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ApplicationParameterArgs']]]]):
        """Set the value of ``parameters``."""
        pulumi.set(self, "parameters", value)
    @property
    @pulumi.getter(name="privateEndpointId")
    def private_endpoint_id(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The OCID of a private endpoint.
        """
        return pulumi.get(self, "private_endpoint_id")
    @private_endpoint_id.setter
    def private_endpoint_id(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``private_endpoint_id``."""
        pulumi.set(self, "private_endpoint_id", value)
    @property
    @pulumi.getter(name="sparkVersion")
    def spark_version(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The Spark version utilized to run the application.
        """
        return pulumi.get(self, "spark_version")
    @spark_version.setter
    def spark_version(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``spark_version``."""
        pulumi.set(self, "spark_version", value)
    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        The current state of this application.
        """
        return pulumi.get(self, "state")
    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``state``."""
        pulumi.set(self, "state", value)
    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> Optional[pulumi.Input[str]]:
        """
        The date and time an application was created, expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format. Example: `2018-04-03T21:10:29.600Z`
        """
        return pulumi.get(self, "time_created")
    @time_created.setter
    def time_created(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``time_created``."""
        pulumi.set(self, "time_created", value)
    @property
    @pulumi.getter(name="timeUpdated")
    def time_updated(self) -> Optional[pulumi.Input[str]]:
        """
        The date and time an application was updated, expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format. Example: `2018-04-03T21:10:29.600Z`
        """
        return pulumi.get(self, "time_updated")
    @time_updated.setter
    def time_updated(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``time_updated``."""
        pulumi.set(self, "time_updated", value)
    @property
    @pulumi.getter(name="warehouseBucketUri")
    def warehouse_bucket_uri(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
        """
        return pulumi.get(self, "warehouse_bucket_uri")
    @warehouse_bucket_uri.setter
    def warehouse_bucket_uri(self, value: Optional[pulumi.Input[str]]):
        """Set the value of ``warehouse_bucket_uri``."""
        pulumi.set(self, "warehouse_bucket_uri", value)
class Application(pulumi.CustomResource):
@overload
def __init__(__self__,
             resource_name: str,
             opts: Optional[pulumi.ResourceOptions] = None,
             archive_uri: Optional[pulumi.Input[str]] = None,
             arguments: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             class_name: Optional[pulumi.Input[str]] = None,
             compartment_id: Optional[pulumi.Input[str]] = None,
             configuration: Optional[pulumi.Input[Mapping[str, Any]]] = None,
             defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
             description: Optional[pulumi.Input[str]] = None,
             display_name: Optional[pulumi.Input[str]] = None,
             driver_shape: Optional[pulumi.Input[str]] = None,
             execute: Optional[pulumi.Input[str]] = None,
             executor_shape: Optional[pulumi.Input[str]] = None,
             file_uri: Optional[pulumi.Input[str]] = None,
             freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
             language: Optional[pulumi.Input[str]] = None,
             logs_bucket_uri: Optional[pulumi.Input[str]] = None,
             metastore_id: Optional[pulumi.Input[str]] = None,
             num_executors: Optional[pulumi.Input[int]] = None,
             parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationParameterArgs']]]]] = None,
             private_endpoint_id: Optional[pulumi.Input[str]] = None,
             spark_version: Optional[pulumi.Input[str]] = None,
             warehouse_bucket_uri: Optional[pulumi.Input[str]] = None,
             __props__=None):
    """
    Create an Application resource in the Oracle Cloud Infrastructure Data
    Flow service, supplying each property as an individual keyword argument.

    Example:

    ```python
    import pulumi
    import pulumi_oci as oci

    test_application = oci.dataflow.Application("testApplication",
        compartment_id=var["compartment_id"],
        display_name=var["application_display_name"],
        driver_shape=var["application_driver_shape"],
        executor_shape=var["application_executor_shape"],
        file_uri=var["application_file_uri"],
        language=var["application_language"],
        num_executors=var["application_num_executors"],
        spark_version=var["application_spark_version"])
    ```

    Existing applications can be imported using the `id`:

    ```sh
    $ pulumi import oci:dataflow/application:Application test_application "id"
    ```

    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] archive_uri: (Updatable) OCI URI of an archive.zip containing custom dependencies for a Python, Java, or Scala application.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] arguments: (Updatable) Command-line arguments for the application; `${name}` placeholders are resolved from the `parameters` map.
    :param pulumi.Input[str] class_name: (Updatable) The class for the application.
    :param pulumi.Input[str] compartment_id: (Updatable) The OCID of a compartment.
    :param pulumi.Input[Mapping[str, Any]] configuration: (Updatable) Spark configuration passed to the running process; not all Spark properties may be overwritten.
    :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource, keyed by namespace.
    :param pulumi.Input[str] description: (Updatable) A user-friendly description. Avoid entering confidential information.
    :param pulumi.Input[str] display_name: (Updatable) A user-friendly name; it does not have to be unique.
    :param pulumi.Input[str] driver_shape: (Updatable) The VM shape for the driver (sets cores and memory).
    :param pulumi.Input[str] execute: (Updatable) The spark-submit command input; when given, Data Flow derives the application settings from it and ignores overlapping inputs.
    :param pulumi.Input[str] executor_shape: (Updatable) The VM shape for the executors (sets cores and memory).
    :param pulumi.Input[str] file_uri: (Updatable) OCI URI of the file containing the application to execute.
    :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form key/value tags with no predefined namespace.
    :param pulumi.Input[str] language: (Updatable) The Spark language.
    :param pulumi.Input[str] logs_bucket_uri: (Updatable) OCI URI of the bucket where Spark job logs are uploaded.
    :param pulumi.Input[str] metastore_id: (Updatable) The OCID of the OCI Hive Metastore.
    :param pulumi.Input[int] num_executors: (Updatable) The number of executor VMs requested.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationParameterArgs']]]] parameters: (Updatable) Name/value pairs used to fill placeholders in properties like `arguments`.
    :param pulumi.Input[str] private_endpoint_id: (Updatable) The OCID of a private endpoint.
    :param pulumi.Input[str] spark_version: (Updatable) The Spark version used to run the application.
    :param pulumi.Input[str] warehouse_bucket_uri: (Updatable) OCI URI of the bucket used as the default warehouse directory for BATCH SQL runs.
    """
    ...
@overload
def __init__(__self__,
             resource_name: str,
             args: ApplicationArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Create an Application resource in the Oracle Cloud Infrastructure Data
    Flow service, supplying all properties bundled in an ApplicationArgs
    object (equivalent to the keyword-argument overload above).

    Existing applications can be imported using the `id`:

    ```sh
    $ pulumi import oci:dataflow/application:Application test_application "id"
    ```

    :param str resource_name: The name of the resource.
    :param ApplicationArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    """Dispatch to the matching overload: an ApplicationArgs bundle or individual keyword properties."""
    parsed_args, parsed_opts = _utilities.get_resource_args_opts(
        ApplicationArgs, pulumi.ResourceOptions, *args, **kwargs)
    if parsed_args is None:
        # Caller used the keyword-argument overload; forward everything as-is.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # Caller passed an ApplicationArgs object; unpack its fields.
        __self__._internal_init(resource_name, parsed_opts, **parsed_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   archive_uri: Optional[pulumi.Input[str]] = None,
                   arguments: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   class_name: Optional[pulumi.Input[str]] = None,
                   compartment_id: Optional[pulumi.Input[str]] = None,
                   configuration: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                   defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                   description: Optional[pulumi.Input[str]] = None,
                   display_name: Optional[pulumi.Input[str]] = None,
                   driver_shape: Optional[pulumi.Input[str]] = None,
                   execute: Optional[pulumi.Input[str]] = None,
                   executor_shape: Optional[pulumi.Input[str]] = None,
                   file_uri: Optional[pulumi.Input[str]] = None,
                   freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                   language: Optional[pulumi.Input[str]] = None,
                   logs_bucket_uri: Optional[pulumi.Input[str]] = None,
                   metastore_id: Optional[pulumi.Input[str]] = None,
                   num_executors: Optional[pulumi.Input[int]] = None,
                   parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationParameterArgs']]]]] = None,
                   private_endpoint_id: Optional[pulumi.Input[str]] = None,
                   spark_version: Optional[pulumi.Input[str]] = None,
                   warehouse_bucket_uri: Optional[pulumi.Input[str]] = None,
                   __props__=None):
    """
    Shared implementation behind both ``__init__`` overloads.

    Validates resource options, builds the input-property bag (validating
    required inputs), and registers the resource with the Pulumi engine.

    :raises TypeError: if ``opts`` is not a ResourceOptions, if ``__props__``
        is passed without ``opts.id``, or if a required input is missing.
    """
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        # Creating a new resource: build the property bag from the inputs.
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = ApplicationArgs.__new__(ApplicationArgs)
        # (name, value, required) triples, in the provider schema's order.
        # Required inputs may only be omitted when adopting an existing
        # resource via opts.urn.
        inputs = [
            ("archive_uri", archive_uri, False),
            ("arguments", arguments, False),
            ("class_name", class_name, False),
            ("compartment_id", compartment_id, True),
            ("configuration", configuration, False),
            ("defined_tags", defined_tags, False),
            ("description", description, False),
            ("display_name", display_name, True),
            ("driver_shape", driver_shape, True),
            ("execute", execute, False),
            ("executor_shape", executor_shape, True),
            ("file_uri", file_uri, True),
            ("freeform_tags", freeform_tags, False),
            ("language", language, True),
            ("logs_bucket_uri", logs_bucket_uri, False),
            ("metastore_id", metastore_id, False),
            ("num_executors", num_executors, True),
            ("parameters", parameters, False),
            ("private_endpoint_id", private_endpoint_id, False),
            ("spark_version", spark_version, True),
            ("warehouse_bucket_uri", warehouse_bucket_uri, False),
        ]
        for prop_name, prop_value, required in inputs:
            if required and prop_value is None and not opts.urn:
                raise TypeError("Missing required property '%s'" % prop_name)
            __props__.__dict__[prop_name] = prop_value
        # Output-only attributes are populated by the provider after creation.
        for output_name in ("owner_principal_id", "owner_user_name", "state",
                            "time_created", "time_updated"):
            __props__.__dict__[output_name] = None
    super(Application, __self__).__init__(
        'oci:dataflow/application:Application',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        archive_uri: Optional[pulumi.Input[str]] = None,
        arguments: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        class_name: Optional[pulumi.Input[str]] = None,
        compartment_id: Optional[pulumi.Input[str]] = None,
        configuration: Optional[pulumi.Input[Mapping[str, Any]]] = None,
        defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
        description: Optional[pulumi.Input[str]] = None,
        display_name: Optional[pulumi.Input[str]] = None,
        driver_shape: Optional[pulumi.Input[str]] = None,
        execute: Optional[pulumi.Input[str]] = None,
        executor_shape: Optional[pulumi.Input[str]] = None,
        file_uri: Optional[pulumi.Input[str]] = None,
        freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
        language: Optional[pulumi.Input[str]] = None,
        logs_bucket_uri: Optional[pulumi.Input[str]] = None,
        metastore_id: Optional[pulumi.Input[str]] = None,
        num_executors: Optional[pulumi.Input[int]] = None,
        owner_principal_id: Optional[pulumi.Input[str]] = None,
        owner_user_name: Optional[pulumi.Input[str]] = None,
        parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationParameterArgs']]]]] = None,
        private_endpoint_id: Optional[pulumi.Input[str]] = None,
        spark_version: Optional[pulumi.Input[str]] = None,
        state: Optional[pulumi.Input[str]] = None,
        time_created: Optional[pulumi.Input[str]] = None,
        time_updated: Optional[pulumi.Input[str]] = None,
        warehouse_bucket_uri: Optional[pulumi.Input[str]] = None) -> 'Application':
    """
    Look up an existing Application by its provider ``id`` and return it as
    a resource, optionally seeding the returned resource's state.

    The accepted keyword arguments mirror the resource's inputs plus its
    provider-populated outputs (``owner_principal_id``, ``owner_user_name``,
    ``state``, ``time_created``, ``time_updated``); any value given here is
    used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    __props__ = _ApplicationState.__new__(_ApplicationState)
    # Seed every state field in one pass; keys match _ApplicationState attributes.
    __props__.__dict__.update({
        "archive_uri": archive_uri,
        "arguments": arguments,
        "class_name": class_name,
        "compartment_id": compartment_id,
        "configuration": configuration,
        "defined_tags": defined_tags,
        "description": description,
        "display_name": display_name,
        "driver_shape": driver_shape,
        "execute": execute,
        "executor_shape": executor_shape,
        "file_uri": file_uri,
        "freeform_tags": freeform_tags,
        "language": language,
        "logs_bucket_uri": logs_bucket_uri,
        "metastore_id": metastore_id,
        "num_executors": num_executors,
        "owner_principal_id": owner_principal_id,
        "owner_user_name": owner_user_name,
        "parameters": parameters,
        "private_endpoint_id": private_endpoint_id,
        "spark_version": spark_version,
        "state": state,
        "time_created": time_created,
        "time_updated": time_updated,
        "warehouse_bucket_uri": warehouse_bucket_uri,
    })
    return Application(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="archiveUri")
def archive_uri(self) -> pulumi.Output[Optional[str]]:
"""
(Updatable) An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
"""
return pulumi.get(self, "archive_uri")
@property
@pulumi.getter
def arguments(self) -> pulumi.Output[Sequence[str]]:
"""
(Updatable) The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as `Service Api Spec`, where `name` is the name of the parameter. Example: `[ "--input", "${input_file}", "--name", "John Doe" ]` If "input_file" has a value of "mydata.xml", then the value above will be translated to `--input mydata.xml --name "John Doe"`
"""
return pulumi.get(self, "arguments")
@property
@pulumi.getter(name="className")
def class_name(self) -> pulumi.Output[Optional[str]]:
"""
(Updatable) The class for the application.
"""
return pulumi.get(self, "class_name")
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> pulumi.Output[str]:
"""
(Updatable) The OCID of a compartment.
"""
return pulumi.get(self, "compartment_id")
@property
@pulumi.getter
def configuration(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
"""
return pulumi.get(self, "configuration")
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
"""
return pulumi.get(self, "defined_tags")
@property
@pulumi.getter
def description(self) -> pulumi.Output[str]:
"""
(Updatable) A user-friendly description. Avoid entering confidential information.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
"""
(Updatable) A user-friendly name. It does not have to be unique. Avoid entering confidential information.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="driverShape")
def driver_shape(self) -> pulumi.Output[str]:
"""
(Updatable) The VM shape for the driver. Sets the driver cores and memory.
"""
return pulumi.get(self, "driver_shape")
@property
@pulumi.getter
def execute(self) -> pulumi.Output[str]:
"""
(Updatable) The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include ``--class``, ``--file``, ``--jars``, ``--conf``, ``--py-files``, and main application file with arguments. Example: ``--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10`` Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
"""
return pulumi.get(self, "execute")
@property
@pulumi.getter(name="executorShape")
def executor_shape(self) -> pulumi.Output[str]:
"""
(Updatable) The VM shape for the executors. Sets the executor cores and memory.
"""
return pulumi.get(self, "executor_shape")
@property
@pulumi.getter(name="fileUri")
def file_uri(self) -> pulumi.Output[str]:
"""
(Updatable) An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
"""
return pulumi.get(self, "file_uri")
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@property
@pulumi.getter
def language(self) -> pulumi.Output[str]:
"""
(Updatable) The Spark language.
"""
return pulumi.get(self, "language")
@property
@pulumi.getter(name="logsBucketUri")
def logs_bucket_uri(self) -> pulumi.Output[str]:
"""
(Updatable) An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
"""
return pulumi.get(self, "logs_bucket_uri")
@property
@pulumi.getter(name="metastoreId")
def metastore_id(self) -> pulumi.Output[str]:
"""
(Updatable) The OCID of Oracle Cloud Infrastructure Hive Metastore.
"""
return pulumi.get(self, "metastore_id")
@property
@pulumi.getter(name="numExecutors")
def num_executors(self) -> pulumi.Output[int]:
"""
(Updatable) The number of executor VMs requested.
"""
return pulumi.get(self, "num_executors")
@property
@pulumi.getter(name="ownerPrincipalId")
def owner_principal_id(self) -> pulumi.Output[str]:
"""
The OCID of the user who created the resource.
"""
return pulumi.get(self, "owner_principal_id")
@property
@pulumi.getter(name="ownerUserName")
def owner_user_name(self) -> pulumi.Output[str]:
"""
The username of the user who created the resource. If the username of the owner does not exist, `null` will be returned and the caller should refer to the ownerPrincipalId value instead.
"""
return pulumi.get(self, "owner_user_name")
@property
@pulumi.getter
def parameters(self) -> pulumi.Output[Sequence['outputs.ApplicationParameter']]:
"""
(Updatable) An array of name/value pairs used to fill placeholders found in properties like `Application.arguments`. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
"""
return pulumi.get(self, "parameters")
@property
@pulumi.getter(name="privateEndpointId")
def private_endpoint_id(self) -> pulumi.Output[str]:
"""
(Updatable) The OCID of a private endpoint.
"""
return pulumi.get(self, "private_endpoint_id")
@property
@pulumi.getter(name="sparkVersion")
def spark_version(self) -> pulumi.Output[str]:
"""
(Updatable) The Spark version utilized to run the application.
"""
return pulumi.get(self, "spark_version")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
"""
The current state of this application.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> pulumi.Output[str]:
"""
The date and time a application was created, expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format. Example: `2018-04-03T21:10:29.600Z`
"""
return pulumi.get(self, "time_created")
@property
@pulumi.getter(name="timeUpdated")
def time_updated(self) -> pulumi.Output[str]:
"""
The date and time a application was updated, expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format. Example: `2018-04-03T21:10:29.600Z`
"""
return pulumi.get(self, "time_updated")
@property
@pulumi.getter(name="warehouseBucketUri")
def warehouse_bucket_uri(self) -> pulumi.Output[str]:
"""
(Updatable) An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
"""
return pulumi.get(self, "warehouse_bucket_uri")
| 61.795045 | 876 | 0.680383 | 10,407 | 82,311 | 5.230422 | 0.037571 | 0.067294 | 0.061727 | 0.054158 | 0.960704 | 0.951243 | 0.940404 | 0.925578 | 0.914004 | 0.90357 | 0 | 0.004846 | 0.205246 | 82,311 | 1,331 | 877 | 61.841473 | 0.827247 | 0.499289 | 0 | 0.773982 | 1 | 0 | 0.108295 | 0.007378 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166886 | false | 0.001314 | 0.009198 | 0 | 0.277267 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
23e2dc2dc251e440b192a830d28a0a4eeed2e55f | 7,578 | py | Python | web/api/tests/routes/test_run_status.py | Farfetch/maestro | 837d93866123aea8b90ad0e2aaf3f13bf3ef6907 | [
"MIT"
] | 21 | 2022-02-02T11:52:14.000Z | 2022-02-06T02:44:16.000Z | web/api/tests/routes/test_run_status.py | Farfetch/maestro | 837d93866123aea8b90ad0e2aaf3f13bf3ef6907 | [
"MIT"
] | 78 | 2022-02-02T12:13:12.000Z | 2022-03-29T08:10:42.000Z | web/api/tests/routes/test_run_status.py | Farfetch/maestro | 837d93866123aea8b90ad0e2aaf3f13bf3ef6907 | [
"MIT"
] | null | null | null | import json
import pytest
from maestro_api.db.models.run import Run, RunStatus
from maestro_api.db.models.run_metric_label import RunMetricLabel
from maestro_api.db.models.run_metric import RunMetric
from maestro_api.db.models.run_agent import RunAgent, RunAgentStatus
from maestro_api.db.models.event import EventType
def test_run_status_start(client):
    """Starting a run emits one START_RUN event for its assigned agent."""
    run_id = "6076d1e3a216ff15b6e95e1f"
    agent_ids = ["6076d1bfb28b871d6bdb6095"]
    Run(
        id=run_id,
        workspace_id="6076d1e3a216ff15b6e95e9a",
        run_configuration_id="6326d1e3a216ff15b6e95e9d",
        title="some example title",
        run_plan_id="6076d1e3a216ff15b6e95e9d",
        agent_ids=agent_ids,
    ).save()

    res_json = json.loads(client.post("/run_status/%s/start" % run_id).data)

    expected_event = {
        "event_type": EventType.START_RUN.value,
        "run_id": run_id,
        "agent_id": agent_ids[0],
    }
    assert len(res_json) == 1
    assert expected_event.items() <= res_json[0].items()
@pytest.mark.parametrize(
    "run_status",
    [RunStatus.FINISHED.value, RunStatus.STOPPED.value, RunStatus.ERROR.value],
)
def test_run_status_restart(client, run_status):
    """A terminated run (finished/stopped/error) can be restarted and re-emits START_RUN."""
    run_id = "6076d1e3a216ff15b6e95e1f"
    agent_ids = ["6076d1bfb28b871d6bdb6095"]
    Run(
        id=run_id,
        workspace_id="6076d1e3a216ff15b6e95e9a",
        run_configuration_id="6326d1e3a216ff15b6e95e9d",
        title="some example title",
        run_plan_id="6076d1e3a216ff15b6e95e9d",
        run_status=run_status,
        agent_ids=agent_ids,
    ).save()

    res_json = json.loads(client.post("/run_status/%s/restart" % run_id).data)

    expected_event = {
        "event_type": EventType.START_RUN.value,
        "run_id": run_id,
        "agent_id": agent_ids[0],
    }
    assert len(res_json) == 1
    assert expected_event.items() <= res_json[0].items()
@pytest.mark.parametrize(
    "run_status",
    [RunStatus.CREATING.value, RunStatus.PENDING.value, RunStatus.RUNNING.value],
)
def test_run_status_restart_with_bad_request(client, run_status):
    """Restarting a run that has not yet terminated is rejected with HTTP 400."""
    run_id = "6076d1e3a216ff15b6e95e1f"
    Run(
        id=run_id,
        workspace_id="6076d1e3a216ff15b6e95e9a",
        run_configuration_id="6326d1e3a216ff15b6e95e9d",
        title="some example title",
        run_plan_id="6076d1e3a216ff15b6e95e9d",
        agent_ids=["6076d1bfb28b871d6bdb6095"],
        run_status=run_status,
    ).save()

    response = client.post("/run_status/%s/restart" % run_id)

    assert 400 == response.status_code
    assert (
        "Run status should be one of ['FINISHED', 'STOPPED', 'ERROR']"
        == response.data.decode("utf-8")
    )
def test_run_status_restart_with_reset_to_default_fields(client):
    """Restart wipes the run's metrics and resets its agents, leaving other runs untouched."""
    run_id = "6076d1e3a216ff15b6e95e1f"
    other_run_id = "6076d1e3a216ff15b6e95e2f"
    agent_id = "6076d1e3a216ff15b6e95e1d"
    Run(
        id=run_id,
        workspace_id="6076d1e3a216ff15b6e95e9a",
        run_configuration_id="6326d1e3a216ff15b6e95e9d",
        title="some example title",
        run_plan_id="6076d1e3a216ff15b6e95e9d",
        run_status=RunStatus.FINISHED.value,
        agent_ids=["6076d1bfb28b871d6bdb6095"],
    ).save()
    # Metrics and labels exist for both the restarted run and an unrelated one.
    RunMetricLabel(run_id=other_run_id).save()
    RunMetricLabel(run_id=run_id).save()
    RunMetric(run_id=other_run_id).save()
    RunMetric(run_id=run_id).save()
    RunAgent(
        run_id=run_id,
        agent_id=agent_id,
        agent_status=RunAgentStatus.FINISHED.value,
        error_message="some error message",
        agent_hostname="agent.maestro.net",
    ).save()
    RunAgent(
        run_id=other_run_id,
        agent_id=agent_id,
        agent_status=RunAgentStatus.FINISHED.value,
        agent_hostname="agent.maestro.net",
    ).save()

    client.post("/run_status/%s/restart" % run_id)

    run_agents = RunAgent.objects()
    # Only the restarted run's metric/label rows are removed.
    assert 1 == len(RunMetric.objects())
    assert 1 == len(RunMetricLabel.objects())
    assert 2 == len(run_agents)
    # The restarted run's agent record is reset to its defaults...
    assert RunAgentStatus.PROCESSING.value == run_agents[0].agent_status
    assert "" == run_agents[0].error_message
    # ...while the unrelated run's agent keeps its state.
    assert RunAgentStatus.FINISHED.value == run_agents[1].agent_status
    assert RunStatus.PENDING.value == Run.objects.get(id=run_id).run_status
def test_run_status_start_with_running_status(client):
    """Starting a run that is already RUNNING is rejected with HTTP 400."""
    run_id = "6076d1e3a216ff15b6e95e1f"
    Run(
        id=run_id,
        workspace_id="6076d1e3a216ff15b6e95e9a",
        run_configuration_id="6326d1e3a216ff15b6e95e9d",
        title="some example title",
        run_plan_id="6076d1e3a216ff15b6e95e9d",
        agent_ids=["6076d1bfb28b871d6bdb6095"],
        run_status=RunStatus.RUNNING.value,
    ).save()

    response = client.post("/run_status/%s/start" % run_id)

    assert 400 == response.status_code
    assert "Run status should be one of ['PENDING']" == response.data.decode("utf-8")
def test_run_status_stop(client):
    """Stopping a running run emits one STOP_RUN event for its assigned agent."""
    run_id = "6076d1e3a216ff15b6e95e1f"
    agent_ids = ["6076d1bfb28b871d6bdb6095"]
    Run(
        id=run_id,
        workspace_id="6076d1e3a216ff15b6e95e9a",
        run_configuration_id="6326d1e3a216ff15b6e95e9d",
        title="some example title",
        run_plan_id="6076d1e3a216ff15b6e95e9d",
        agent_ids=agent_ids,
        run_status=RunStatus.RUNNING.value,
    ).save()

    res_json = json.loads(client.post("/run_status/%s/stop" % run_id).data)

    expected_event = {
        "event_type": EventType.STOP_RUN.value,
        "run_id": run_id,
        "agent_id": agent_ids[0],
    }
    assert len(res_json) == 1
    assert expected_event.items() <= res_json[0].items()
@pytest.mark.parametrize(
    "run_status",
    [RunStatus.FINISHED.value, RunStatus.STOPPED.value, RunStatus.ERROR.value],
)
def test_run_status_stop_with_bad_request_response(client, run_status):
    """Stopping an already-terminated run is rejected with HTTP 400."""
    run_id = "6076d1e3a216ff15b6e95e1f"
    Run(
        id=run_id,
        workspace_id="6076d1e3a216ff15b6e95e9a",
        run_configuration_id="6326d1e3a216ff15b6e95e9d",
        title="some example title",
        run_plan_id="6076d1e3a216ff15b6e95e9d",
        agent_ids=["6076d1bfb28b871d6bdb6095"],
        run_status=run_status,
    ).save()

    response = client.post("/run_status/%s/stop" % run_id)

    assert 400 == response.status_code
    assert (
        "Run status should be one of ['PENDING', 'CREATING', 'RUNNING']"
        == response.data.decode("utf-8")
    )
| 30.312 | 87 | 0.695302 | 906 | 7,578 | 5.483444 | 0.101545 | 0.048309 | 0.076087 | 0.056361 | 0.819042 | 0.791465 | 0.733494 | 0.720008 | 0.700081 | 0.700081 | 0 | 0.096437 | 0.203616 | 7,578 | 249 | 88 | 30.433735 | 0.726761 | 0 | 0 | 0.756098 | 0 | 0 | 0.196358 | 0.125891 | 0 | 0 | 0 | 0 | 0.092683 | 1 | 0.034146 | false | 0 | 0.034146 | 0 | 0.068293 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
23f44e3986f81f3ff04554de8900d3f103847843 | 19,257 | py | Python | Liquid-experiments/util/jobModel.py | PasaLab/YAO | 2e70203197cd79f9522d65731ee5dc0eb236b005 | [
"Apache-2.0"
] | 2 | 2021-08-30T14:12:09.000Z | 2022-01-20T02:14:22.000Z | Liquid-experiments/util/jobModel.py | PasaLab/YAO | 2e70203197cd79f9522d65731ee5dc0eb236b005 | [
"Apache-2.0"
] | null | null | null | Liquid-experiments/util/jobModel.py | PasaLab/YAO | 2e70203197cd79f9522d65731ee5dc0eb236b005 | [
"Apache-2.0"
] | null | null | null | import json
import random
def convert_job_status(status_code):
    """Translate a numeric job status code into its display name.

    Codes outside the known 0-5 range map to 'Unknown'.
    """
    status_names = (
        'Created',   # 0
        'Starting',  # 1
        'Running',   # 2
        'Stopped',   # 3
        'Finished',  # 4
        'Failed',    # 5
    )
    in_range = 0 <= status_code < len(status_names)
    return status_names[status_code] if in_range else 'Unknown'
# Container images and workspace repos shared by the predefined jobs.
_DEFAULT_IMAGE = "registry.cn-beijing.aliyuncs.com/quickdeploy0/yao-tensorflow:1.14-gpu"
_TF2_IMAGE = "registry.cn-beijing.aliyuncs.com/quickdeploy0/yao-tensorflow:2.1-gpu"
_BENCHMARKS_WORKSPACE = 'http://code.pasalab.jluapp.com/newnius/yao-job-benchmarks.git'
_CNN_WORKSPACE = 'http://code.pasalab.jluapp.com/newnius/yao-job-CNN.git'
_LSTM_WORKSPACE = 'http://code.pasalab.jluapp.com/newnius/yao-job-LSTM.git'
_NEUMF_WORKSPACE = 'http://code.pasalab.jluapp.com/newnius/yao-job-NeuMF.git'


def _make_task(name, cmd, image=_DEFAULT_IMAGE, gpu_number="1",
               gpu_memory="8192", is_ps="0"):
    """Build one task description dict.

    Key insertion order is kept identical to the original literals so the
    json.dumps output is byte-for-byte unchanged.
    """
    return {
        "name": name,
        "image": image,
        "cmd": cmd,
        "cpu_number": "4",
        "memory": "4096",
        "gpu_number": gpu_number,
        "gpu_memory": gpu_memory,
        "is_ps": is_ps,
        "gpu_model": "t4",
    }


def _make_job(name, workspace, tasks):
    """Wrap a task list into the submission dict shared by all job types."""
    return {
        'name': name,
        'workspace': workspace,
        'cluster': 'default',
        'priority': '25',
        'run_before': '',
        'locality': '0',
        'tasks': json.dumps(tasks),
    }


def get_job(job_name, seed=2020):
    """Return the job submission dict for *job_name*, or {} if unknown.

    :param job_name: one of the predefined identifiers: 'sleep', 'small',
        'large', 'job1'/'job2'/'job3'/'job5'/'job10', 'cnn', 'lstm', 'neumf',
        the tf_cnn_benchmarks models 'resnet50'/'vgg16'/'inception3', or
        their distributed variants 'resnet50_d'/'vgg16_d'/'inception3_d'.
    :param seed: seed for the randomized benchmark parameters, so repeated
        calls with the same seed produce identical jobs.
    """
    # Seed the module-level RNG (the original implementation also mutated
    # the global random state, so this is kept for compatibility).
    random.seed(seed)
    job = {}
    if job_name == 'sleep':
        job = _make_job('sleep', _BENCHMARKS_WORKSPACE,
                        [_make_task("node1", "sleep infinity")])
    elif job_name == 'small':
        tasks = [_make_task("node%d" % i, "sleep 300") for i in range(1, 3)]
        job = _make_job(job_name, _CNN_WORKSPACE, tasks)
    elif job_name == 'large':
        tasks = [_make_task("node%d" % i, "sleep 600") for i in range(1, 17)]
        job = _make_job(job_name, _CNN_WORKSPACE, tasks)
    elif job_name == 'job1':
        job = _make_job('job1', _BENCHMARKS_WORKSPACE,
                        [_make_task("node1", "sleep infinity")])
    elif job_name == 'job2':
        tasks = [_make_task("node%d" % i, "sleep infinity")
                 for i in range(1, 3)]
        job = _make_job('job2', _BENCHMARKS_WORKSPACE, tasks)
    elif job_name == 'job3':
        # job3 requests two GPUs per task.
        tasks = [_make_task("node%d" % i, "sleep infinity", gpu_number="2")
                 for i in range(1, 4)]
        job = _make_job('job3', _BENCHMARKS_WORKSPACE, tasks)
    elif job_name == 'job5':
        tasks = [_make_task("node%d" % i, "sleep infinity")
                 for i in range(1, 6)]
        job = _make_job('job5', _BENCHMARKS_WORKSPACE, tasks)
    elif job_name == 'job10':
        tasks = [_make_task("node%d" % i, "sleep infinity")
                 for i in range(1, 11)]
        job = _make_job('job10', _BENCHMARKS_WORKSPACE, tasks)
    elif job_name == 'cnn':
        cmd = ("PYTHONPATH=\"$PYTHONPATH:/workspace\""
               " python /workspace/official/r1/mnist/mnist.py"
               " --data_dir=/workspace/data/")
        job = _make_job(job_name, _CNN_WORKSPACE,
                        [_make_task("node1", cmd, gpu_memory="4096")])
    elif job_name == 'lstm':
        cmd = ("PYTHONPATH=\"$PYTHONPATH:/workspace\""
               " python3 /workspace/official/staging/shakespeare/shakespeare_main.py"
               " --training_data=/workspace/data/shakespeare.txt")
        job = _make_job(job_name, _LSTM_WORKSPACE,
                        [_make_task("node1", cmd, image=_TF2_IMAGE)])
    elif job_name == 'neumf':
        batch_size = 1000
        cmd = ("PYTHONPATH=\"$PYTHONPATH:/workspace\""
               " python /workspace/official/recommendation/ncf_keras_main.py"
               " --batch_size=" + str(batch_size) +
               " --data_dir=/workspace/data/"
               " --dataset=ml-20m")
        job = _make_job('neumf', _NEUMF_WORKSPACE,
                        [_make_task("node1", cmd, image=_TF2_IMAGE,
                                    gpu_memory="4096")])
    elif job_name in ('resnet50', 'vgg16', 'inception3'):
        # Draws happen in the same order as the original so a given seed
        # yields the same parameters.
        num_gpus = random.randint(1, 2)
        batch_size = random.randint(1, 8) * 4
        num_batches = random.randint(4, 20) * 50
        if job_name == 'vgg16':
            # vgg16 re-rolls with a narrower range; the draws above are
            # still made first to keep the RNG sequence unchanged.
            batch_size = random.randint(4, 8) * 4
            num_batches = random.randint(4, 10) * 50
        cmd = ("python /workspace/scripts/tf_cnn_benchmarks/tf_cnn_benchmarks.py"
               " --model=" + job_name +
               " --num_gpus=" + str(num_gpus) +
               " --batch_size=" + str(batch_size) +
               " --num_batches=" + str(num_batches) +
               " --train_dir=/tmp"
               " --variable_update=parameter_server"
               " --save_model_steps=0")
        job = _make_job(job_name, _BENCHMARKS_WORKSPACE,
                        [_make_task("node1", cmd, gpu_number=str(num_gpus))])
    elif job_name in ('resnet50_d', 'vgg16_d', 'inception3_d'):
        model = job_name.split('_')[0]
        batch_size = random.randint(1, 8) * 4
        num_batches = random.randint(4, 20) * 50
        cmd = ("python /workspace/scripts/tf_cnn_benchmarks/tf_cnn_benchmarks.py"
               " --model=" + model +
               " --num_gpus=1"
               " --batch_size=" + str(batch_size) +
               " --num_batches=" + str(num_batches) +
               " --train_dir=/tmp"
               " --variable_update=distributed_replicated"
               " --ps_hosts=ps1:2222"
               " --worker_hosts=worker1:2222,worker2:2222"
               " --save_model_steps=0")
        # Two workers plus one parameter server (is_ps="1").
        tasks = [
            _make_task("worker1", cmd + " --job_name=worker" + " --task_index=0"),
            _make_task("worker2", cmd + " --job_name=worker" + " --task_index=1"),
            _make_task("ps1", cmd + " --job_name=ps" + " --task_index=0",
                       is_ps="1"),
        ]
        job = _make_job(job_name, _BENCHMARKS_WORKSPACE, tasks)
    else:
        print("[WARN] job {} not exist".format(job_name))
    return job
| 28.65625 | 87 | 0.538454 | 2,299 | 19,257 | 4.360157 | 0.07003 | 0.048883 | 0.070331 | 0.103152 | 0.903432 | 0.893356 | 0.893356 | 0.888069 | 0.876496 | 0.871907 | 0 | 0.064321 | 0.235447 | 19,257 | 671 | 88 | 28.698957 | 0.616518 | 0.003323 | 0 | 0.852632 | 0 | 0.070677 | 0.539189 | 0.200832 | 0 | 0 | 0 | 0 | 0 | 1 | 0.003008 | false | 0 | 0.003008 | 0 | 0.010526 | 0.001504 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f1de366ed90546db6631418046b51ed325868381 | 164 | py | Python | gridalert/template/__init__.py | tkishimoto/gridalert | 2b69999e7dbb6f3dc4919280a1f5bbceaf9e0c43 | [
"Apache-2.0"
] | 1 | 2019-07-19T10:16:15.000Z | 2019-07-19T10:16:15.000Z | gridalert/template/__init__.py | tkishimoto/gridalert | 2b69999e7dbb6f3dc4919280a1f5bbceaf9e0c43 | [
"Apache-2.0"
] | null | null | null | gridalert/template/__init__.py | tkishimoto/gridalert | 2b69999e7dbb6f3dc4919280a1f5bbceaf9e0c43 | [
"Apache-2.0"
] | null | null | null | from .logwatch_template import *
from .elastic_template import *
from .logwatchfine_template import *
from .messages_template import *
from .sts_template import *
| 23.428571 | 36 | 0.810976 | 20 | 164 | 6.4 | 0.4 | 0.546875 | 0.5625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.128049 | 164 | 6 | 37 | 27.333333 | 0.895105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f1f66f30547b8235d318613d9af1c2d4b6fc6a86 | 41 | py | Python | decryption/__init__.py | trishantpahwa/PasswordProtectPDF | a2f167a7e93f7470bb336bf2773f2f3d1c241fbd | [
"BSD-2-Clause"
] | null | null | null | decryption/__init__.py | trishantpahwa/PasswordProtectPDF | a2f167a7e93f7470bb336bf2773f2f3d1c241fbd | [
"BSD-2-Clause"
] | null | null | null | decryption/__init__.py | trishantpahwa/PasswordProtectPDF | a2f167a7e93f7470bb336bf2773f2f3d1c241fbd | [
"BSD-2-Clause"
] | null | null | null | from .decrypt_file import remove_password | 41 | 41 | 0.902439 | 6 | 41 | 5.833333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.073171 | 41 | 1 | 41 | 41 | 0.921053 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 1 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
7b1401de36c535dda3d9de37be792e3da5c789d0 | 271,060 | py | Python | isi_sdk/apis/auth_api.py | Atomicology/isilon_sdk_python | 91039da803ae37ed4abf8d2a3f59c333f3ef1866 | [
"MIT"
] | null | null | null | isi_sdk/apis/auth_api.py | Atomicology/isilon_sdk_python | 91039da803ae37ed4abf8d2a3f59c333f3ef1866 | [
"MIT"
] | null | null | null | isi_sdk/apis/auth_api.py | Atomicology/isilon_sdk_python | 91039da803ae37ed4abf8d2a3f59c333f3ef1866 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
AuthApi.py
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class AuthApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def create_auth_group(self, auth_group, **kwargs):
"""
Create a new group.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_auth_group(auth_group, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param AuthGroupCreateParams auth_group: (required)
:param str zone: Optional zone.
:param str provider: Optional provider type.
:return: CreateResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['auth_group', 'zone', 'provider']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_auth_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'auth_group' is set
if ('auth_group' not in params) or (params['auth_group'] is None):
raise ValueError("Missing the required parameter `auth_group` when calling `create_auth_group`")
resource_path = '/platform/1/auth/groups'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'zone' in params:
query_params['zone'] = params['zone']
if 'provider' in params:
query_params['provider'] = params['provider']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'auth_group' in params:
body_params = params['auth_group']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['basic_auth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def create_auth_refresh_item(self, auth_refresh_item, **kwargs):
"""
Refresh the authentication service configuration.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_auth_refresh_item(auth_refresh_item, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param Empty auth_refresh_item: (required)
:return: CreateAuthRefreshItemResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['auth_refresh_item']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_auth_refresh_item" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'auth_refresh_item' is set
if ('auth_refresh_item' not in params) or (params['auth_refresh_item'] is None):
raise ValueError("Missing the required parameter `auth_refresh_item` when calling `create_auth_refresh_item`")
resource_path = '/platform/3/auth/refresh'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'auth_refresh_item' in params:
body_params = params['auth_refresh_item']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['basic_auth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CreateAuthRefreshItemResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def create_auth_role(self, auth_role, **kwargs):
    """
    Create a new role.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param AuthRoleCreateParams auth_role: (required)
    :return: CreateResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('auth_role', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_auth_role" % name
            )
    # The request body is mandatory.
    if auth_role is None:
        raise ValueError("Missing the required parameter `auth_role` when calling `create_auth_role`")
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/1/auth/roles'.replace('{format}', 'json'), 'POST',
        {},             # no path parameters
        {},             # no query parameters
        header_params,
        body=auth_role,
        post_params=[],
        files={},
        response_type='CreateResponse',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def create_auth_user(self, auth_user, **kwargs):
    """
    Create a new user.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param AuthUserCreateParams auth_user: (required)
    :param str zone: Optional zone.
    :param str provider: Optional provider type.
    :return: CreateResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('auth_user', 'zone', 'provider', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_auth_user" % name
            )
    # The request body is mandatory.
    if auth_user is None:
        raise ValueError("Missing the required parameter `auth_user` when calling `create_auth_user`")
    # Forward only the optional query parameters the caller actually passed.
    query_params = {}
    for opt in ('zone', 'provider'):
        if opt in kwargs:
            query_params[opt] = kwargs[opt]
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/1/auth/users'.replace('{format}', 'json'), 'POST',
        {},             # no path parameters
        query_params,
        header_params,
        body=auth_user,
        post_params=[],
        files={},
        response_type='CreateResponse',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def create_mapping_identity(self, mapping_identity, **kwargs):
    """
    Manually set or modify a mapping between two personae.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param MappingIdentityCreateParams mapping_identity: (required)
    :param bool _2way: Create a bi-directional mapping from source to
        target and target to source.
    :param str zone: Optional zone.
    :param bool replace: Replace existing mappings.
    :return: Empty
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('mapping_identity', '_2way', 'zone', 'replace', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_mapping_identity" % name
            )
    # The request body is mandatory.
    if mapping_identity is None:
        raise ValueError("Missing the required parameter `mapping_identity` when calling `create_mapping_identity`")
    query_params = {}
    # `_2way` is renamed on the wire: Python identifiers cannot start
    # with a digit, but the API expects the query key `2way`.
    if '_2way' in kwargs:
        query_params['2way'] = kwargs['_2way']
    if 'zone' in kwargs:
        query_params['zone'] = kwargs['zone']
    if 'replace' in kwargs:
        query_params['replace'] = kwargs['replace']
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/1/auth/mapping/identities'.replace('{format}', 'json'), 'POST',
        {},             # no path parameters
        query_params,
        header_params,
        body=mapping_identity,
        post_params=[],
        files={},
        response_type='Empty',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def create_mapping_identity_0(self, mapping_identity, **kwargs):
    """
    Manually set or modify a mapping between two personae.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param Empty mapping_identity: (required)
    :param str mapping_identity_id: Value substituted into the
        {MappingIdentityId} segment of the resource path. (optional,
        new — previous versions never filled the placeholder)
    :param str type: Desired mapping target to fetch/generate.
    :param str zone: Optional zone.
    :return: MappingIdentities
        If the method is called asynchronously, returns the request thread.
    """
    # BUG FIX: the resource path contains a {MappingIdentityId}
    # placeholder but the generated code never populated path_params, so
    # the literal placeholder text was sent in the URL.  Accept an
    # optional `mapping_identity_id` keyword and substitute it through
    # path_params, the same mechanism sibling methods (e.g.
    # delete_auth_group) use for their path parameters.  Callers that
    # never passed the keyword see identical behavior.
    recognized = ('mapping_identity', 'mapping_identity_id', 'type', 'zone', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_mapping_identity_0" % name
            )
    # The request body is mandatory.
    if mapping_identity is None:
        raise ValueError("Missing the required parameter `mapping_identity` when calling `create_mapping_identity_0`")
    resource_path = '/platform/1/auth/mapping/identities/{MappingIdentityId}'.replace('{format}', 'json')
    path_params = {}
    if kwargs.get('mapping_identity_id') is not None:
        path_params['MappingIdentityId'] = kwargs['mapping_identity_id']
    query_params = {}
    if 'type' in kwargs:
        query_params['type'] = kwargs['type']
    if 'zone' in kwargs:
        query_params['zone'] = kwargs['zone']
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        resource_path, 'POST',
        path_params,
        query_params,
        header_params,
        body=mapping_identity,
        post_params=[],
        files={},
        response_type='MappingIdentities',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def create_providers_ads_item(self, providers_ads_item, **kwargs):
    """
    Create a new ADS provider.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ProvidersAdsItem providers_ads_item: (required)
    :return: CreateResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('providers_ads_item', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_providers_ads_item" % name
            )
    # The request body is mandatory.
    if providers_ads_item is None:
        raise ValueError("Missing the required parameter `providers_ads_item` when calling `create_providers_ads_item`")
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/3/auth/providers/ads'.replace('{format}', 'json'), 'POST',
        {},             # no path parameters
        {},             # no query parameters
        header_params,
        body=providers_ads_item,
        post_params=[],
        files={},
        response_type='CreateResponse',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def create_providers_file_item(self, providers_file_item, **kwargs):
    """
    Create a new file provider.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ProvidersFileItem providers_file_item: (required)
    :return: CreateResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('providers_file_item', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_providers_file_item" % name
            )
    # The request body is mandatory.
    if providers_file_item is None:
        raise ValueError("Missing the required parameter `providers_file_item` when calling `create_providers_file_item`")
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/1/auth/providers/file'.replace('{format}', 'json'), 'POST',
        {},             # no path parameters
        {},             # no query parameters
        header_params,
        body=providers_file_item,
        post_params=[],
        files={},
        response_type='CreateResponse',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def create_providers_krb5_item(self, providers_krb5_item, **kwargs):
    """
    Create a new KRB5 provider.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ProvidersKrb5Item providers_krb5_item: (required)
    :return: CreateResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('providers_krb5_item', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_providers_krb5_item" % name
            )
    # The request body is mandatory.
    if providers_krb5_item is None:
        raise ValueError("Missing the required parameter `providers_krb5_item` when calling `create_providers_krb5_item`")
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/3/auth/providers/krb5'.replace('{format}', 'json'), 'POST',
        {},             # no path parameters
        {},             # no query parameters
        header_params,
        body=providers_krb5_item,
        post_params=[],
        files={},
        response_type='CreateResponse',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def create_providers_ldap_item(self, providers_ldap_item, **kwargs):
    """
    Create a new LDAP provider.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ProvidersLdapItem providers_ldap_item: (required)
    :return: CreateResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('providers_ldap_item', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_providers_ldap_item" % name
            )
    # The request body is mandatory.
    if providers_ldap_item is None:
        raise ValueError("Missing the required parameter `providers_ldap_item` when calling `create_providers_ldap_item`")
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/3/auth/providers/ldap'.replace('{format}', 'json'), 'POST',
        {},             # no path parameters
        {},             # no query parameters
        header_params,
        body=providers_ldap_item,
        post_params=[],
        files={},
        response_type='CreateResponse',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def create_providers_nis_item(self, providers_nis_item, **kwargs):
    """
    Create a new NIS provider.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ProvidersNisItem providers_nis_item: (required)
    :return: CreateResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('providers_nis_item', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_providers_nis_item" % name
            )
    # The request body is mandatory.
    if providers_nis_item is None:
        raise ValueError("Missing the required parameter `providers_nis_item` when calling `create_providers_nis_item`")
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/3/auth/providers/nis'.replace('{format}', 'json'), 'POST',
        {},             # no path parameters
        {},             # no query parameters
        header_params,
        body=providers_nis_item,
        post_params=[],
        files={},
        response_type='CreateResponse',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def create_settings_krb5_domain(self, settings_krb5_domain, **kwargs):
    """
    Create a new krb5 domain.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param SettingsKrb5DomainCreateParams settings_krb5_domain: (required)
    :return: CreateResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('settings_krb5_domain', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_settings_krb5_domain" % name
            )
    # The request body is mandatory.
    if settings_krb5_domain is None:
        raise ValueError("Missing the required parameter `settings_krb5_domain` when calling `create_settings_krb5_domain`")
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/1/auth/settings/krb5/domains'.replace('{format}', 'json'), 'POST',
        {},             # no path parameters
        {},             # no query parameters
        header_params,
        body=settings_krb5_domain,
        post_params=[],
        files={},
        response_type='CreateResponse',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def create_settings_krb5_realm(self, settings_krb5_realm, **kwargs):
    """
    Create a new krb5 realm.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param SettingsKrb5RealmCreateParams settings_krb5_realm: (required)
    :return: CreateResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('settings_krb5_realm', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_settings_krb5_realm" % name
            )
    # The request body is mandatory.
    if settings_krb5_realm is None:
        raise ValueError("Missing the required parameter `settings_krb5_realm` when calling `create_settings_krb5_realm`")
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/1/auth/settings/krb5/realms'.replace('{format}', 'json'), 'POST',
        {},             # no path parameters
        {},             # no query parameters
        header_params,
        body=settings_krb5_realm,
        post_params=[],
        files={},
        response_type='CreateResponse',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def delete_auth_group(self, auth_group_id, **kwargs):
    """
    Delete the group.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str auth_group_id: Delete the group. (required)
    :param bool cached: If true, flush the group from the cache.
    :param str zone: Filter groups by zone.
    :param str provider: Filter groups by provider.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('auth_group_id', 'cached', 'zone', 'provider', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_auth_group" % name
            )
    # The path parameter is mandatory.
    if auth_group_id is None:
        raise ValueError("Missing the required parameter `auth_group_id` when calling `delete_auth_group`")
    path_params = {'AuthGroupId': auth_group_id}
    # Forward only the optional query parameters the caller actually passed.
    query_params = {}
    for opt in ('cached', 'zone', 'provider'):
        if opt in kwargs:
            query_params[opt] = kwargs[opt]
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/1/auth/groups/{AuthGroupId}'.replace('{format}', 'json'), 'DELETE',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def delete_auth_groups(self, **kwargs):
    """
    Flush the groups cache.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param bool cached: If true, only flush cached objects.
    :param str zone: Filter groups by zone.
    :param str provider: Filter groups by provider.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('cached', 'zone', 'provider', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_auth_groups" % name
            )
    # Forward only the optional query parameters the caller actually passed.
    query_params = {}
    for opt in ('cached', 'zone', 'provider'):
        if opt in kwargs:
            query_params[opt] = kwargs[opt]
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/1/auth/groups'.replace('{format}', 'json'), 'DELETE',
        {},             # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def delete_auth_role(self, auth_role_id, **kwargs):
    """
    Delete the role.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str auth_role_id: Delete the role. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('auth_role_id', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_auth_role" % name
            )
    # The path parameter is mandatory.
    if auth_role_id is None:
        raise ValueError("Missing the required parameter `auth_role_id` when calling `delete_auth_role`")
    path_params = {'AuthRoleId': auth_role_id}
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/1/auth/roles/{AuthRoleId}'.replace('{format}', 'json'), 'DELETE',
        path_params,
        {},             # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def delete_auth_user(self, auth_user_id, **kwargs):
    """
    Delete the user.

    Runs synchronously unless a `callback` keyword argument is supplied,
    in which case the HTTP request is issued asynchronously and the
    callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str auth_user_id: Delete the user. (required)
    :param bool cached: If true, flush the user from the cache.
    :param str zone: Filter users by zone.
    :param str provider: Filter users by provider.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Refuse keyword arguments this endpoint does not understand.
    recognized = ('auth_user_id', 'cached', 'zone', 'provider', 'callback')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_auth_user" % name
            )
    # The path parameter is mandatory.
    if auth_user_id is None:
        raise ValueError("Missing the required parameter `auth_user_id` when calling `delete_auth_user`")
    path_params = {'AuthUserId': auth_user_id}
    # Forward only the optional query parameters the caller actually passed.
    query_params = {}
    for opt in ('cached', 'zone', 'provider'):
        if opt in kwargs:
            query_params[opt] = kwargs[opt]
    # Content negotiation: omit `Accept` when the client has no preference.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    return self.api_client.call_api(
        '/platform/1/auth/users/{AuthUserId}'.replace('{format}', 'json'), 'DELETE',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def delete_auth_users(self, **kwargs):
    """
    Flush the users cache.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.delete_auth_users(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param bool cached: If true, only flush cached objects.
    :param str zone: Filter users by zone.
    :param str provider: Filter users by provider.
    :return: None
    """
    optional = ('cached', 'zone', 'provider')
    # Reject any keyword argument this endpoint does not understand.
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_auth_users" % arg
            )

    resource_path = '/platform/1/auth/users'.replace('{format}', 'json')
    # Only forward the optional filters the caller actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'DELETE',
                                    {},
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def delete_mapping_identities(self, **kwargs):
    """
    Flush the entire idmap cache.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.delete_mapping_identities(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter to apply when deleting identity mappings.
    :param str zone: Optional zone.
    :param bool remove: Delete mapping instead of flush mapping cache.
    :return: None
    """
    optional = ('filter', 'zone', 'remove')
    # Reject any keyword argument this endpoint does not understand.
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_mapping_identities" % arg
            )

    resource_path = '/platform/1/auth/mapping/identities'.replace('{format}', 'json')
    # Only forward the optional filters the caller actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'DELETE',
                                    {},
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def delete_mapping_identity(self, mapping_identity_id, **kwargs):
    """
    Flush one idmap cache entry (or delete the mapping outright).

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.delete_mapping_identity(mapping_identity_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str mapping_identity_id: Identity whose mapping is flushed. (required)
    :param str zone: Optional zone.
    :param bool _2way: Delete the bi-directional mapping from source to
        target and target to source.
    :param str target: Target identity persona.
    :param bool remove: Delete mapping instead of flush mapping from cache.
    :return: None
    """
    # Python identifiers cannot start with a digit, so the `2way` query
    # parameter is exposed to callers as `_2way` and renamed on the wire.
    optional = (('zone', 'zone'), ('_2way', '2way'),
                ('target', 'target'), ('remove', 'remove'))
    accepted = {kw for kw, _ in optional} | {'callback'}
    for arg in kwargs:
        if arg not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_mapping_identity" % arg
            )
    # The path parameter is mandatory.
    if mapping_identity_id is None:
        raise ValueError("Missing the required parameter `mapping_identity_id` when calling `delete_mapping_identity`")

    resource_path = '/platform/1/auth/mapping/identities/{MappingIdentityId}'.replace('{format}', 'json')
    path_params = {'MappingIdentityId': mapping_identity_id}

    query_params = {}
    for kw_name, wire_name in optional:
        if kw_name in kwargs:
            query_params[wire_name] = kwargs[kw_name]

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def delete_providers_ads_by_id(self, providers_ads_id, **kwargs):
    """
    Delete the ADS provider.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.delete_providers_ads_by_id(providers_ads_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str providers_ads_id: ID of the ADS provider to delete. (required)
    :return: None
    """
    # Reject any keyword argument this endpoint does not understand.
    for arg in kwargs:
        if arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_providers_ads_by_id" % arg
            )
    # The path parameter is mandatory.
    if providers_ads_id is None:
        raise ValueError("Missing the required parameter `providers_ads_id` when calling `delete_providers_ads_by_id`")

    resource_path = '/platform/3/auth/providers/ads/{ProvidersAdsId}'.replace('{format}', 'json')
    path_params = {'ProvidersAdsId': providers_ads_id}

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def delete_providers_file_by_id(self, providers_file_id, **kwargs):
    """
    Delete the file provider.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.delete_providers_file_by_id(providers_file_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str providers_file_id: ID of the file provider to delete. (required)
    :return: None
    """
    # Reject any keyword argument this endpoint does not understand.
    for arg in kwargs:
        if arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_providers_file_by_id" % arg
            )
    # The path parameter is mandatory.
    if providers_file_id is None:
        raise ValueError("Missing the required parameter `providers_file_id` when calling `delete_providers_file_by_id`")

    resource_path = '/platform/1/auth/providers/file/{ProvidersFileId}'.replace('{format}', 'json')
    path_params = {'ProvidersFileId': providers_file_id}

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def delete_providers_krb5_by_id(self, providers_krb5_id, **kwargs):
    """
    Delete the KRB5 provider.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.delete_providers_krb5_by_id(providers_krb5_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str providers_krb5_id: ID of the KRB5 provider to delete. (required)
    :return: None
    """
    # Reject any keyword argument this endpoint does not understand.
    for arg in kwargs:
        if arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_providers_krb5_by_id" % arg
            )
    # The path parameter is mandatory.
    if providers_krb5_id is None:
        raise ValueError("Missing the required parameter `providers_krb5_id` when calling `delete_providers_krb5_by_id`")

    resource_path = '/platform/3/auth/providers/krb5/{ProvidersKrb5Id}'.replace('{format}', 'json')
    path_params = {'ProvidersKrb5Id': providers_krb5_id}

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def delete_providers_ldap_by_id(self, providers_ldap_id, **kwargs):
    """
    Delete the LDAP provider.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.delete_providers_ldap_by_id(providers_ldap_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str providers_ldap_id: ID of the LDAP provider to delete. (required)
    :return: None
    """
    # Reject any keyword argument this endpoint does not understand.
    for arg in kwargs:
        if arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_providers_ldap_by_id" % arg
            )
    # The path parameter is mandatory.
    if providers_ldap_id is None:
        raise ValueError("Missing the required parameter `providers_ldap_id` when calling `delete_providers_ldap_by_id`")

    resource_path = '/platform/3/auth/providers/ldap/{ProvidersLdapId}'.replace('{format}', 'json')
    path_params = {'ProvidersLdapId': providers_ldap_id}

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def delete_providers_local_by_id(self, providers_local_id, **kwargs):
    """
    Delete the local provider.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.delete_providers_local_by_id(providers_local_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str providers_local_id: ID of the local provider to delete. (required)
    :return: None
    """
    # Reject any keyword argument this endpoint does not understand.
    for arg in kwargs:
        if arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_providers_local_by_id" % arg
            )
    # The path parameter is mandatory.
    if providers_local_id is None:
        raise ValueError("Missing the required parameter `providers_local_id` when calling `delete_providers_local_by_id`")

    resource_path = '/platform/1/auth/providers/local/{ProvidersLocalId}'.replace('{format}', 'json')
    path_params = {'ProvidersLocalId': providers_local_id}

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def delete_providers_nis_by_id(self, providers_nis_id, **kwargs):
    """
    Delete the NIS provider.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.delete_providers_nis_by_id(providers_nis_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str providers_nis_id: ID of the NIS provider to delete. (required)
    :return: None
    """
    # Reject any keyword argument this endpoint does not understand.
    for arg in kwargs:
        if arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_providers_nis_by_id" % arg
            )
    # The path parameter is mandatory.
    if providers_nis_id is None:
        raise ValueError("Missing the required parameter `providers_nis_id` when calling `delete_providers_nis_by_id`")

    resource_path = '/platform/3/auth/providers/nis/{ProvidersNisId}'.replace('{format}', 'json')
    path_params = {'ProvidersNisId': providers_nis_id}

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def delete_settings_krb5_domain(self, settings_krb5_domain_id, **kwargs):
    """
    Remove a krb5 domain.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.delete_settings_krb5_domain(settings_krb5_domain_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str settings_krb5_domain_id: ID of the krb5 domain to remove. (required)
    :return: None
    """
    # Reject any keyword argument this endpoint does not understand.
    for arg in kwargs:
        if arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_settings_krb5_domain" % arg
            )
    # The path parameter is mandatory.
    if settings_krb5_domain_id is None:
        raise ValueError("Missing the required parameter `settings_krb5_domain_id` when calling `delete_settings_krb5_domain`")

    resource_path = '/platform/1/auth/settings/krb5/domains/{SettingsKrb5DomainId}'.replace('{format}', 'json')
    path_params = {'SettingsKrb5DomainId': settings_krb5_domain_id}

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def delete_settings_krb5_realm(self, settings_krb5_realm_id, **kwargs):
    """
    Remove a realm.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.delete_settings_krb5_realm(settings_krb5_realm_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str settings_krb5_realm_id: ID of the krb5 realm to remove. (required)
    :return: None
    """
    # Reject any keyword argument this endpoint does not understand.
    for arg in kwargs:
        if arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_settings_krb5_realm" % arg
            )
    # The path parameter is mandatory.
    if settings_krb5_realm_id is None:
        raise ValueError("Missing the required parameter `settings_krb5_realm_id` when calling `delete_settings_krb5_realm`")

    resource_path = '/platform/1/auth/settings/krb5/realms/{SettingsKrb5RealmId}'.replace('{format}', 'json')
    path_params = {'SettingsKrb5RealmId': settings_krb5_realm_id}

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def get_auth_access_user(self, auth_access_user, **kwargs):
    """
    Determine user's access rights to a file.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.get_auth_access_user(auth_access_user, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str auth_access_user: User whose access rights are checked. (required)
    :param str path: Path to the file. Must be within /ifs.
    :param str zone: Access zone the user is in.
    :param bool numeric: Show the user's numeric identifier.
    :return: AuthAccess
    """
    optional = ('path', 'zone', 'numeric')
    # Reject any keyword argument this endpoint does not understand.
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_auth_access_user" % arg
            )
    # The path parameter is mandatory.
    if auth_access_user is None:
        raise ValueError("Missing the required parameter `auth_access_user` when calling `get_auth_access_user`")

    resource_path = '/platform/1/auth/access/{AuthAccessUser}'.replace('{format}', 'json')
    path_params = {'AuthAccessUser': auth_access_user}
    # Only forward the optional filters the caller actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='AuthAccess',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def get_auth_group(self, auth_group_id, **kwargs):
    """
    Retrieve the group information.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.get_auth_group(auth_group_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str auth_group_id: ID of the group to retrieve. (required)
    :param bool cached: If true, only return cached objects.
    :param bool resolve_names: Resolve names of personas.
    :param str zone: Filter groups by zone.
    :param str provider: Filter groups by provider.
    :return: AuthGroups
    """
    optional = ('cached', 'resolve_names', 'zone', 'provider')
    # Reject any keyword argument this endpoint does not understand.
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_auth_group" % arg
            )
    # The path parameter is mandatory.
    if auth_group_id is None:
        raise ValueError("Missing the required parameter `auth_group_id` when calling `get_auth_group`")

    resource_path = '/platform/1/auth/groups/{AuthGroupId}'.replace('{format}', 'json')
    path_params = {'AuthGroupId': auth_group_id}
    # Only forward the optional filters the caller actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='AuthGroups',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def get_auth_id(self, **kwargs):
    """
    Retrieve the current security token.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.get_auth_id(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: AuthId
    """
    # This endpoint takes no parameters beyond the optional callback.
    for arg in kwargs:
        if arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_auth_id" % arg
            )

    resource_path = '/platform/1/auth/id'.replace('{format}', 'json')

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    {},
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='AuthId',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def get_auth_log_level(self, **kwargs):
    """
    Get the current authentications service and netlogon logging level.

    Synchronous by default; pass a `callback` function to make the
    request asynchronously, in which case the request thread is returned.

    >>> thread = api.get_auth_log_level(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: AuthLogLevel
    """
    # This endpoint takes no parameters beyond the optional callback.
    for arg in kwargs:
        if arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_auth_log_level" % arg
            )

    resource_path = '/platform/3/auth/log-level'.replace('{format}', 'json')

    header_params = {}
    # Omit the Accept header entirely when negotiation yields nothing.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    {},
                                    {},
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='AuthLogLevel',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def get_auth_netgroup(self, auth_netgroup_id, **kwargs):
    """
    Retrieve the netgroup information.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_auth_netgroup(auth_netgroup_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str auth_netgroup_id: Netgroup to look up. (required)
    :param bool ignore_errors: Ignore netgroup errors.
    :param bool recursive: Perform recursive search.
    :param str zone: Filter users by zone.
    :param str provider: Filter users by provider.
    :return: AuthNetgroups
        If the method is called asynchronously,
        returns the request thread.
    """
    optional = ('ignore_errors', 'recursive', 'zone', 'provider')
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_auth_netgroup" % arg
            )
    # The positional argument is mandatory and may not be None.
    if auth_netgroup_id is None:
        raise ValueError("Missing the required parameter `auth_netgroup_id` when calling `get_auth_netgroup`")

    resource_path = '/platform/1/auth/netgroups/{AuthNetgroupId}'.replace('{format}', 'json')
    path_params = {'AuthNetgroupId': auth_netgroup_id}
    # Forward only the optional filters that were actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AuthNetgroups',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_auth_privileges(self, **kwargs):
    """
    List all privileges.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_auth_privileges(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: AuthPrivileges
        If the method is called asynchronously,
        returns the request thread.
    """
    # `callback` is the only keyword argument this endpoint accepts.
    for arg in kwargs:
        if arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_auth_privileges" % arg
            )

    resource_path = '/platform/1/auth/privileges'.replace('{format}', 'json')

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        {},            # no path parameters
        {},            # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AuthPrivileges',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_auth_role(self, auth_role_id, **kwargs):
    """
    Retrieve the role information.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_auth_role(auth_role_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str auth_role_id: Role to look up. (required)
    :param bool resolve_names: Resolve names of personas.
    :return: AuthRoles
        If the method is called asynchronously,
        returns the request thread.
    """
    optional = ('resolve_names',)
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_auth_role" % arg
            )
    # The positional argument is mandatory and may not be None.
    if auth_role_id is None:
        raise ValueError("Missing the required parameter `auth_role_id` when calling `get_auth_role`")

    resource_path = '/platform/1/auth/roles/{AuthRoleId}'.replace('{format}', 'json')
    path_params = {'AuthRoleId': auth_role_id}
    # Forward only the optional filters that were actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AuthRoles',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_auth_shells(self, **kwargs):
    """
    List all shells.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_auth_shells(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: AuthShells
        If the method is called asynchronously,
        returns the request thread.
    """
    # `callback` is the only keyword argument this endpoint accepts.
    for arg in kwargs:
        if arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_auth_shells" % arg
            )

    resource_path = '/platform/1/auth/shells'.replace('{format}', 'json')

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        {},            # no path parameters
        {},            # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AuthShells',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_auth_user(self, auth_user_id, **kwargs):
    """
    Retrieve the user information.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_auth_user(auth_user_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str auth_user_id: User to look up. (required)
    :param bool cached: If true, only return cached objects.
    :param bool resolve_names: Resolve names of personas.
    :param str zone: Filter users by zone.
    :param str provider: Filter users by provider.
    :return: AuthUsers
        If the method is called asynchronously,
        returns the request thread.
    """
    optional = ('cached', 'resolve_names', 'zone', 'provider')
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_auth_user" % arg
            )
    # The positional argument is mandatory and may not be None.
    if auth_user_id is None:
        raise ValueError("Missing the required parameter `auth_user_id` when calling `get_auth_user`")

    resource_path = '/platform/1/auth/users/{AuthUserId}'.replace('{format}', 'json')
    path_params = {'AuthUserId': auth_user_id}
    # Forward only the optional filters that were actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AuthUsers',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_auth_wellknown(self, auth_wellknown_id, **kwargs):
    """
    Retrieve the wellknown persona.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_auth_wellknown(auth_wellknown_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str auth_wellknown_id: Wellknown persona to look up. (required)
    :param str scope: If specified as \"effective\" or not specified, all fields are returned. If specified as \"user\", only fields with non-default values are shown. If specified as \"default\", the original values are returned.
    :return: AuthWellknowns
        If the method is called asynchronously,
        returns the request thread.
    """
    optional = ('scope',)
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_auth_wellknown" % arg
            )
    # The positional argument is mandatory and may not be None.
    if auth_wellknown_id is None:
        raise ValueError("Missing the required parameter `auth_wellknown_id` when calling `get_auth_wellknown`")

    resource_path = '/platform/1/auth/wellknowns/{AuthWellknownId}'.replace('{format}', 'json')
    path_params = {'AuthWellknownId': auth_wellknown_id}
    # Forward only the optional filters that were actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AuthWellknowns',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_auth_wellknowns(self, **kwargs):
    """
    List all wellknown personas.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_auth_wellknowns(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: AuthWellknowns
        If the method is called asynchronously,
        returns the request thread.
    """
    # `callback` is the only keyword argument this endpoint accepts.
    for arg in kwargs:
        if arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_auth_wellknowns" % arg
            )

    resource_path = '/platform/1/auth/wellknowns'.replace('{format}', 'json')

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        {},            # no path parameters
        {},            # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AuthWellknowns',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_mapping_dump(self, **kwargs):
    """
    Retrieve all identity mappings (uid, gid, sid, and on-disk) for the supplied source persona.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_mapping_dump(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param bool nocreate: Idmap should attempt to create missing identity mappings.
    :param str zone: Optional zone.
    :return: MappingDump
        If the method is called asynchronously,
        returns the request thread.
    """
    optional = ('nocreate', 'zone')
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_mapping_dump" % arg
            )

    resource_path = '/platform/3/auth/mapping/dump'.replace('{format}', 'json')
    # Forward only the optional filters that were actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        {},            # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='MappingDump',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_mapping_identity(self, mapping_identity_id, **kwargs):
    """
    Retrieve all identity mappings (uid, gid, sid, and on-disk) for the supplied source persona.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_mapping_identity(mapping_identity_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str mapping_identity_id: Source persona to look up. (required)
    :param bool nocreate: Idmap should attempt to create missing identity mappings.
    :param str zone: Optional zone.
    :return: MappingIdentities
        If the method is called asynchronously,
        returns the request thread.
    """
    optional = ('nocreate', 'zone')
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_mapping_identity" % arg
            )
    # The positional argument is mandatory and may not be None.
    if mapping_identity_id is None:
        raise ValueError("Missing the required parameter `mapping_identity_id` when calling `get_mapping_identity`")

    resource_path = '/platform/1/auth/mapping/identities/{MappingIdentityId}'.replace('{format}', 'json')
    path_params = {'MappingIdentityId': mapping_identity_id}
    # Forward only the optional filters that were actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='MappingIdentities',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_mapping_users_lookup(self, **kwargs):
    """
    Retrieve the user information.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_mapping_users_lookup(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int primary_gid: The user's primary group ID.
    :param int uid: The user ID.
    :param str zone: The zone the user belongs to.
    :param list[int] gid: The IDs of the groups that the user belongs to.
    :param str user: The user name.
    :param str kerberos_principal: The Kerberos principal name, of the form user@realm.
    :return: MappingUsersLookup
        If the method is called asynchronously,
        returns the request thread.
    """
    optional = ('primary_gid', 'uid', 'zone', 'gid', 'user',
                'kerberos_principal')
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_mapping_users_lookup" % arg
            )

    resource_path = '/platform/1/auth/mapping/users/lookup'.replace('{format}', 'json')
    # Forward only the optional filters that were actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        {},            # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='MappingUsersLookup',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_mapping_users_rules(self, **kwargs):
    """
    Retrieve the user mapping rules.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_mapping_users_rules(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str zone: The zone to which the user mapping applies.
    :return: MappingUsersRules
        If the method is called asynchronously,
        returns the request thread.
    """
    optional = ('zone',)
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_mapping_users_rules" % arg
            )

    resource_path = '/platform/1/auth/mapping/users/rules'.replace('{format}', 'json')
    # Forward only the optional filters that were actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        {},            # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='MappingUsersRules',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_providers_ads_by_id(self, providers_ads_id, **kwargs):
    """
    Retrieve the ADS provider.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_providers_ads_by_id(providers_ads_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str providers_ads_id: ADS provider to look up. (required)
    :param str scope: If specified as \"effective\" or not specified, all fields are returned. If specified as \"user\", only fields with non-default values are shown. If specified as \"default\", the original values are returned.
    :return: ProvidersAds
        If the method is called asynchronously,
        returns the request thread.
    """
    optional = ('scope',)
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_providers_ads_by_id" % arg
            )
    # The positional argument is mandatory and may not be None.
    if providers_ads_id is None:
        raise ValueError("Missing the required parameter `providers_ads_id` when calling `get_providers_ads_by_id`")

    resource_path = '/platform/3/auth/providers/ads/{ProvidersAdsId}'.replace('{format}', 'json')
    path_params = {'ProvidersAdsId': providers_ads_id}
    # Forward only the optional filters that were actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ProvidersAds',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_providers_file_by_id(self, providers_file_id, **kwargs):
    """
    Retrieve the file provider.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_providers_file_by_id(providers_file_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str providers_file_id: File provider to look up. (required)
    :param str scope: If specified as \"effective\" or not specified, all fields are returned. If specified as \"user\", only fields with non-default values are shown. If specified as \"default\", the original values are returned.
    :return: ProvidersFile
        If the method is called asynchronously,
        returns the request thread.
    """
    optional = ('scope',)
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_providers_file_by_id" % arg
            )
    # The positional argument is mandatory and may not be None.
    if providers_file_id is None:
        raise ValueError("Missing the required parameter `providers_file_id` when calling `get_providers_file_by_id`")

    resource_path = '/platform/1/auth/providers/file/{ProvidersFileId}'.replace('{format}', 'json')
    path_params = {'ProvidersFileId': providers_file_id}
    # Forward only the optional filters that were actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ProvidersFile',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_providers_krb5_by_id(self, providers_krb5_id, **kwargs):
    """
    Retrieve the KRB5 provider.

    Synchronous by default; supply a `callback` callable to run the
    request asynchronously, in which case the request thread is
    returned:

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_providers_krb5_by_id(providers_krb5_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str providers_krb5_id: KRB5 provider to look up. (required)
    :param str scope: If specified as \"effective\" or not specified, all fields are returned. If specified as \"user\", only fields with non-default values are shown. If specified as \"default\", the original values are returned.
    :return: ProvidersKrb5
        If the method is called asynchronously,
        returns the request thread.
    """
    optional = ('scope',)
    for arg in kwargs:
        if arg not in optional and arg != 'callback':
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_providers_krb5_by_id" % arg
            )
    # The positional argument is mandatory and may not be None.
    if providers_krb5_id is None:
        raise ValueError("Missing the required parameter `providers_krb5_id` when calling `get_providers_krb5_by_id`")

    resource_path = '/platform/3/auth/providers/krb5/{ProvidersKrb5Id}'.replace('{format}', 'json')
    path_params = {'ProvidersKrb5Id': providers_krb5_id}
    # Forward only the optional filters that were actually supplied.
    query_params = {name: kwargs[name] for name in optional if name in kwargs}

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ProvidersKrb5',
        auth_settings=['basic_auth'],
        callback=kwargs.get('callback'))
def get_providers_ldap_by_id(self, providers_ldap_id, **kwargs):
    """
    Retrieve the LDAP provider.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_providers_ldap_by_id(providers_ldap_id, callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str providers_ldap_id: Retrieve the LDAP provider. (required)
    :param str scope: If specified as "effective" or not specified, all
        fields are returned. If specified as "user", only fields with
        non-default values are shown. If specified as "default", the
        original values are returned.
    :return: ProvidersLdap; when called asynchronously, the request
        thread is returned instead.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = ('providers_ldap_id', 'scope', 'callback')
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_providers_ldap_by_id" % name)
        arguments[name] = value
    del arguments['kwargs']

    # The provider id is a required path segment and may not be None.
    if arguments.get('providers_ldap_id') is None:
        raise ValueError("Missing the required parameter `providers_ldap_id` when calling `get_providers_ldap_by_id`")

    resource_path = '/platform/3/auth/providers/ldap/{ProvidersLdapId}'.replace('{format}', 'json')
    path_params = {}
    if 'providers_ldap_id' in arguments:
        path_params['ProvidersLdapId'] = arguments['providers_ldap_id']
    query_params = {}
    if 'scope' in arguments:
        query_params['scope'] = arguments['scope']

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='ProvidersLdap',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def get_providers_local(self, **kwargs):
    """
    List all local providers.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_providers_local(callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str scope: If specified as "effective" or not specified, all
        fields are returned. If specified as "user", only fields with
        non-default values are shown. If specified as "default", the
        original values are returned.
    :return: ProvidersLocal; when called asynchronously, the request
        thread is returned instead.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = ('scope', 'callback')
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_providers_local" % name)
        arguments[name] = value
    del arguments['kwargs']

    resource_path = '/platform/1/auth/providers/local'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'scope' in arguments:
        query_params['scope'] = arguments['scope']

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='ProvidersLocal',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def get_providers_local_by_id(self, providers_local_id, **kwargs):
    """
    Retrieve the local provider.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_providers_local_by_id(providers_local_id, callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str providers_local_id: Retrieve the local provider. (required)
    :param str scope: If specified as "effective" or not specified, all
        fields are returned. If specified as "user", only fields with
        non-default values are shown. If specified as "default", the
        original values are returned.
    :return: ProvidersLocal; when called asynchronously, the request
        thread is returned instead.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = ('providers_local_id', 'scope', 'callback')
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_providers_local_by_id" % name)
        arguments[name] = value
    del arguments['kwargs']

    # The provider id is a required path segment and may not be None.
    if arguments.get('providers_local_id') is None:
        raise ValueError("Missing the required parameter `providers_local_id` when calling `get_providers_local_by_id`")

    resource_path = '/platform/1/auth/providers/local/{ProvidersLocalId}'.replace('{format}', 'json')
    path_params = {}
    if 'providers_local_id' in arguments:
        path_params['ProvidersLocalId'] = arguments['providers_local_id']
    query_params = {}
    if 'scope' in arguments:
        query_params['scope'] = arguments['scope']

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='ProvidersLocal',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def get_providers_nis_by_id(self, providers_nis_id, **kwargs):
    """
    Retrieve the NIS provider.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_providers_nis_by_id(providers_nis_id, callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str providers_nis_id: Retrieve the NIS provider. (required)
    :param str scope: If specified as "effective" or not specified, all
        fields are returned. If specified as "user", only fields with
        non-default values are shown. If specified as "default", the
        original values are returned.
    :return: ProvidersNis; when called asynchronously, the request
        thread is returned instead.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = ('providers_nis_id', 'scope', 'callback')
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_providers_nis_by_id" % name)
        arguments[name] = value
    del arguments['kwargs']

    # The provider id is a required path segment and may not be None.
    if arguments.get('providers_nis_id') is None:
        raise ValueError("Missing the required parameter `providers_nis_id` when calling `get_providers_nis_by_id`")

    resource_path = '/platform/3/auth/providers/nis/{ProvidersNisId}'.replace('{format}', 'json')
    path_params = {}
    if 'providers_nis_id' in arguments:
        path_params['ProvidersNisId'] = arguments['providers_nis_id']
    query_params = {}
    if 'scope' in arguments:
        query_params['scope'] = arguments['scope']

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='ProvidersNis',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def get_providers_summary(self, **kwargs):
    """
    Retrieve the summary information.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_providers_summary(callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :return: ProvidersSummary; when called asynchronously, the request
        thread is returned instead.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = ('callback',)
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_providers_summary" % name)
        arguments[name] = value
    del arguments['kwargs']

    resource_path = '/platform/3/auth/providers/summary'.replace('{format}', 'json')
    path_params = {}
    query_params = {}

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='ProvidersSummary',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def get_settings_acls(self, **kwargs):
    """
    Retrieve the ACL policy settings and preset configurations.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_settings_acls(callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str preset: If specified the preset configuration values for
        all applicable ACL policies are returned.
    :return: SettingsAcls; when called asynchronously, the request
        thread is returned instead.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = ('preset', 'callback')
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_settings_acls" % name)
        arguments[name] = value
    del arguments['kwargs']

    resource_path = '/platform/3/auth/settings/acls'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'preset' in arguments:
        query_params['preset'] = arguments['preset']

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='SettingsAcls',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def get_settings_global(self, **kwargs):
    """
    Retrieve the global settings.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_settings_global(callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str scope: If specified as "effective" or not specified, all
        fields are returned. If specified as "user", only fields with
        non-default values are shown. If specified as "default", the
        original values are returned.
    :param str zone: Zone which contains any per-zone settings.
    :return: SettingsGlobal; when called asynchronously, the request
        thread is returned instead.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = ('scope', 'zone', 'callback')
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_settings_global" % name)
        arguments[name] = value
    del arguments['kwargs']

    resource_path = '/platform/1/auth/settings/global'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    for option in ('scope', 'zone'):
        if option in arguments:
            query_params[option] = arguments[option]

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='SettingsGlobal',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def get_settings_krb5_defaults(self, **kwargs):
    """
    Retrieve the krb5 settings.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_settings_krb5_defaults(callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :return: SettingsKrb5Defaults; when called asynchronously, the
        request thread is returned instead.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = ('callback',)
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_settings_krb5_defaults" % name)
        arguments[name] = value
    del arguments['kwargs']

    resource_path = '/platform/1/auth/settings/krb5/defaults'.replace('{format}', 'json')
    path_params = {}
    query_params = {}

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='SettingsKrb5Defaults',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def get_settings_krb5_domain(self, settings_krb5_domain_id, **kwargs):
    """
    View the krb5 domain settings.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_settings_krb5_domain(settings_krb5_domain_id, callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str settings_krb5_domain_id: View the krb5 domain settings. (required)
    :return: SettingsKrb5Domains; when called asynchronously, the
        request thread is returned instead.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = ('settings_krb5_domain_id', 'callback')
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_settings_krb5_domain" % name)
        arguments[name] = value
    del arguments['kwargs']

    # The domain id is a required path segment and may not be None.
    if arguments.get('settings_krb5_domain_id') is None:
        raise ValueError("Missing the required parameter `settings_krb5_domain_id` when calling `get_settings_krb5_domain`")

    resource_path = '/platform/1/auth/settings/krb5/domains/{SettingsKrb5DomainId}'.replace('{format}', 'json')
    path_params = {}
    if 'settings_krb5_domain_id' in arguments:
        path_params['SettingsKrb5DomainId'] = arguments['settings_krb5_domain_id']
    query_params = {}

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='SettingsKrb5Domains',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def get_settings_krb5_realm(self, settings_krb5_realm_id, **kwargs):
    """
    Retrieve the krb5 settings for realms.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_settings_krb5_realm(settings_krb5_realm_id, callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str settings_krb5_realm_id: Retrieve the krb5 settings for realms. (required)
    :return: SettingsKrb5Realms; when called asynchronously, the request
        thread is returned instead.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = ('settings_krb5_realm_id', 'callback')
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_settings_krb5_realm" % name)
        arguments[name] = value
    del arguments['kwargs']

    # The realm id is a required path segment and may not be None.
    if arguments.get('settings_krb5_realm_id') is None:
        raise ValueError("Missing the required parameter `settings_krb5_realm_id` when calling `get_settings_krb5_realm`")

    resource_path = '/platform/1/auth/settings/krb5/realms/{SettingsKrb5RealmId}'.replace('{format}', 'json')
    path_params = {}
    if 'settings_krb5_realm_id' in arguments:
        path_params['SettingsKrb5RealmId'] = arguments['settings_krb5_realm_id']
    query_params = {}

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='SettingsKrb5Realms',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def get_settings_mapping(self, **kwargs):
    """
    Retrieve the mapping settings.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_settings_mapping(callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str scope: If specified as "effective" or not specified, all
        fields are returned. If specified as "user", only fields with
        non-default values are shown. If specified as "default", the
        original values are returned.
    :param str zone: Access zone which contains mapping settings.
    :return: SettingsMapping; when called asynchronously, the request
        thread is returned instead.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = ('scope', 'zone', 'callback')
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_settings_mapping" % name)
        arguments[name] = value
    del arguments['kwargs']

    resource_path = '/platform/1/auth/settings/mapping'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    for option in ('scope', 'zone'):
        if option in arguments:
            query_params[option] = arguments[option]

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='SettingsMapping',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def list_auth_groups(self, **kwargs):
    """
    List all groups.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_auth_groups(callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str domain: Filter groups by domain.
    :param str zone: Filter groups by zone.
    :param str resume: Continue returning results from previous call
        using this token (token should come from the previous call,
        resume cannot be used with other options).
    :param bool cached: If true, only return cached objects.
    :param bool resolve_names: Resolve names of personas.
    :param str filter: Filter groups by name prefix.
    :param int limit: Return no more than this many results at once (see resume).
    :param str provider: Filter groups by provider.
    :param bool query_member_of: Enumerate all groups that a group is a member of.
    :return: AuthGroupsExtended; when called asynchronously, the request
        thread is returned instead.
    """
    # Query options accepted by this endpoint, in the order they are
    # serialized onto the request.
    options = ('domain', 'zone', 'resume', 'cached', 'resolve_names',
               'filter', 'limit', 'provider', 'query_member_of')
    accepted = options + ('callback',)
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_auth_groups" % name)
        arguments[name] = value
    del arguments['kwargs']

    # Range-check the pagination size before touching the network.
    if 'limit' in arguments and arguments['limit'] < 1.0:
        raise ValueError("Invalid value for parameter `limit` when calling `list_auth_groups`, must be a value greater than or equal to `1.0`")

    resource_path = '/platform/1/auth/groups'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    for option in options:
        if option in arguments:
            query_params[option] = arguments[option]

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='AuthGroupsExtended',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def list_auth_roles(self, **kwargs):
    """
    List all roles.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_auth_roles(callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str sort: The field that will be used for sorting.
    :param bool resolve_names: Resolve names of personas.
    :param str dir: The direction of the sort.
    :return: AuthRolesExtended; when called asynchronously, the request
        thread is returned instead.
    """
    # NOTE: the generated docstring described `resolve_names` as
    # "Filter users by zone." — a copy-paste error; corrected above to
    # match the description used by every sibling method.
    # Reject any keyword this endpoint does not understand.
    accepted = ('sort', 'resolve_names', 'dir', 'callback')
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_auth_roles" % name)
        arguments[name] = value
    del arguments['kwargs']

    resource_path = '/platform/1/auth/roles'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    for option in ('sort', 'resolve_names', 'dir'):
        if option in arguments:
            query_params[option] = arguments[option]

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='AuthRolesExtended',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def list_auth_users(self, **kwargs):
    """
    List all users.

    Synchronous HTTP GET by default; supply a `callback` keyword to make
    the request asynchronously instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_auth_users(callback=callback_function)

    :param callback function: The callback function for asynchronous
        request. (optional)
    :param str domain: Filter users by domain.
    :param str zone: Filter users by zone.
    :param str resume: Continue returning results from previous call
        using this token (token should come from the previous call,
        resume cannot be used with other options).
    :param bool cached: If true, only return cached objects.
    :param bool resolve_names: Resolve names of personas.
    :param str filter: Filter users by name prefix.
    :param int limit: Return no more than this many results at once (see resume).
    :param str provider: Filter users by provider.
    :param bool query_member_of: Enumerate all users that a group is a member of.
    :return: AuthUsersExtended; when called asynchronously, the request
        thread is returned instead.
    """
    # Query options accepted by this endpoint, in the order they are
    # serialized onto the request.
    options = ('domain', 'zone', 'resume', 'cached', 'resolve_names',
               'filter', 'limit', 'provider', 'query_member_of')
    accepted = options + ('callback',)
    arguments = locals()
    for name, value in arguments['kwargs'].items():
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_auth_users" % name)
        arguments[name] = value
    del arguments['kwargs']

    # Range-check the pagination size before touching the network.
    if 'limit' in arguments and arguments['limit'] < 1.0:
        raise ValueError("Invalid value for parameter `limit` when calling `list_auth_users`, must be a value greater than or equal to `1.0`")

    resource_path = '/platform/1/auth/users'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    for option in options:
        if option in arguments:
            query_params[option] = arguments[option]

    # Content negotiation: only send Accept when a match was found.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=None,
                                    post_params=[],
                                    files={},
                                    response_type='AuthUsersExtended',
                                    auth_settings=['basic_auth'],
                                    callback=arguments.get('callback'))
def list_providers_ads(self, **kwargs):
    """
    List all ADS providers.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_providers_ads(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str scope: If specified as "effective" or not specified, all
        fields are returned. If specified as "user", only fields with
        non-default values are shown. If specified as "default", the
        original values are returned.
    :return: ProvidersAdsExtended
        If the method is called asynchronously, returns the request thread.
    """
    all_params = ['scope', 'callback']
    # Validate keyword arguments directly; the generated locals()/iteritems
    # round-trip was fragile (mutating the locals() dict is
    # implementation-specific) and depended on six's iteritems helper.
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_providers_ads" % key
            )
    params = dict(kwargs)

    resource_path = '/platform/3/auth/providers/ads'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'scope' in params:
        query_params['scope'] = params['scope']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProvidersAdsExtended',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def list_providers_file(self, **kwargs):
    """
    List all file providers.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_providers_file(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str scope: If specified as "effective" or not specified, all
        fields are returned. If specified as "user", only fields with
        non-default values are shown. If specified as "default", the
        original values are returned.
    :return: ProvidersFile
        If the method is called asynchronously, returns the request thread.
    """
    all_params = ['scope', 'callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_providers_file" % key
            )
    params = dict(kwargs)

    resource_path = '/platform/1/auth/providers/file'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'scope' in params:
        query_params['scope'] = params['scope']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProvidersFile',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def list_providers_krb5(self, **kwargs):
    """
    List all KRB5 providers.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_providers_krb5(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str scope: If specified as "effective" or not specified, all
        fields are returned. If specified as "user", only fields with
        non-default values are shown. If specified as "default", the
        original values are returned.
    :return: ProvidersKrb5Extended
        If the method is called asynchronously, returns the request thread.
    """
    all_params = ['scope', 'callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_providers_krb5" % key
            )
    params = dict(kwargs)

    resource_path = '/platform/3/auth/providers/krb5'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'scope' in params:
        query_params['scope'] = params['scope']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProvidersKrb5Extended',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def list_providers_ldap(self, **kwargs):
    """
    List all LDAP providers.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_providers_ldap(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str scope: If specified as "effective" or not specified, all
        fields are returned. If specified as "user", only fields with
        non-default values are shown. If specified as "default", the
        original values are returned.
    :return: ProvidersLdap
        If the method is called asynchronously, returns the request thread.
    """
    all_params = ['scope', 'callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_providers_ldap" % key
            )
    params = dict(kwargs)

    resource_path = '/platform/3/auth/providers/ldap'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'scope' in params:
        query_params['scope'] = params['scope']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProvidersLdap',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def list_providers_nis(self, **kwargs):
    """
    List all NIS providers.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_providers_nis(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str scope: If specified as "effective" or not specified, all
        fields are returned. If specified as "user", only fields with
        non-default values are shown. If specified as "default", the
        original values are returned.
    :return: ProvidersNisExtended
        If the method is called asynchronously, returns the request thread.
    """
    all_params = ['scope', 'callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_providers_nis" % key
            )
    params = dict(kwargs)

    resource_path = '/platform/3/auth/providers/nis'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'scope' in params:
        query_params['scope'] = params['scope']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProvidersNisExtended',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def list_settings_krb5_domains(self, **kwargs):
    """
    Retrieve the krb5 settings for domains.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_settings_krb5_domains(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: SettingsKrb5Domains
        If the method is called asynchronously, returns the request thread.
    """
    all_params = ['callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_settings_krb5_domains" % key
            )
    params = dict(kwargs)

    resource_path = '/platform/1/auth/settings/krb5/domains'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='SettingsKrb5Domains',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def list_settings_krb5_realms(self, **kwargs):
    """
    Retrieve the krb5 settings for realms.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_settings_krb5_realms(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: SettingsKrb5Realms
        If the method is called asynchronously, returns the request thread.
    """
    all_params = ['callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_settings_krb5_realms" % key
            )
    params = dict(kwargs)

    resource_path = '/platform/1/auth/settings/krb5/realms'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='SettingsKrb5Realms',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def update_auth_group(self, auth_group, auth_group_id, **kwargs):
    """
    Modify the group.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_auth_group(auth_group, auth_group_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param AuthGroup auth_group: (required)
    :param str auth_group_id: Modify the group. (required)
    :param str zone: Optional zone.
    :param str provider: Optional provider type.
    :return: None
        If the method is called asynchronously, returns the request thread.
    :raises ValueError: if a required parameter is None.
    """
    all_params = ['zone', 'provider', 'callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_auth_group" % key
            )
    params = dict(kwargs)

    # verify the required parameters are set (positional, so only None check)
    if auth_group is None:
        raise ValueError("Missing the required parameter `auth_group` when calling `update_auth_group`")
    if auth_group_id is None:
        raise ValueError("Missing the required parameter `auth_group_id` when calling `update_auth_group`")

    resource_path = '/platform/1/auth/groups/{AuthGroupId}'.replace('{format}', 'json')
    path_params = {'AuthGroupId': auth_group_id}
    query_params = {}
    if 'zone' in params:
        query_params['zone'] = params['zone']
    if 'provider' in params:
        query_params['provider'] = params['provider']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = auth_group

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def update_auth_log_level(self, auth_log_level, **kwargs):
    """
    Set the current authentication service and netlogon logging level.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_auth_log_level(auth_log_level, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param AuthLogLevelExtended auth_log_level: (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    :raises ValueError: if `auth_log_level` is None.
    """
    all_params = ['callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_auth_log_level" % key
            )
    params = dict(kwargs)

    # verify the required parameter is set (positional, so only None check)
    if auth_log_level is None:
        raise ValueError("Missing the required parameter `auth_log_level` when calling `update_auth_log_level`")

    resource_path = '/platform/3/auth/log-level'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = auth_log_level

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def update_auth_role(self, auth_role, auth_role_id, **kwargs):
    """
    Modify the role.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_auth_role(auth_role, auth_role_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param AuthRole auth_role: (required)
    :param str auth_role_id: Modify the role. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    :raises ValueError: if a required parameter is None.
    """
    all_params = ['callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_auth_role" % key
            )
    params = dict(kwargs)

    # verify the required parameters are set (positional, so only None check)
    if auth_role is None:
        raise ValueError("Missing the required parameter `auth_role` when calling `update_auth_role`")
    if auth_role_id is None:
        raise ValueError("Missing the required parameter `auth_role_id` when calling `update_auth_role`")

    resource_path = '/platform/1/auth/roles/{AuthRoleId}'.replace('{format}', 'json')
    path_params = {'AuthRoleId': auth_role_id}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = auth_role

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def update_auth_user(self, auth_user, auth_user_id, **kwargs):
    """
    Modify the user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_auth_user(auth_user, auth_user_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param AuthUser auth_user: (required)
    :param str auth_user_id: Modify the user. (required)
    :param str zone: Optional zone.
    :param str provider: Optional provider type.
    :return: None
        If the method is called asynchronously, returns the request thread.
    :raises ValueError: if a required parameter is None.
    """
    all_params = ['zone', 'provider', 'callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_auth_user" % key
            )
    params = dict(kwargs)

    # verify the required parameters are set (positional, so only None check)
    if auth_user is None:
        raise ValueError("Missing the required parameter `auth_user` when calling `update_auth_user`")
    if auth_user_id is None:
        raise ValueError("Missing the required parameter `auth_user_id` when calling `update_auth_user`")

    resource_path = '/platform/1/auth/users/{AuthUserId}'.replace('{format}', 'json')
    path_params = {'AuthUserId': auth_user_id}
    query_params = {}
    if 'zone' in params:
        query_params['zone'] = params['zone']
    if 'provider' in params:
        query_params['provider'] = params['provider']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = auth_user

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def update_mapping_import(self, mapping_import, **kwargs):
    """
    Set or update a list of mappings between two personae.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_mapping_import(mapping_import, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param MappingImport mapping_import: (required)
    :param str zone: Optional zone.
    :param bool replace: Specify whether existing mappings should be
        replaced. The default behavior is to leave existing mappings
        intact and return an error.
    :return: None
        If the method is called asynchronously, returns the request thread.
    :raises ValueError: if `mapping_import` is None.
    """
    all_params = ['zone', 'replace', 'callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_mapping_import" % key
            )
    params = dict(kwargs)

    # verify the required parameter is set (positional, so only None check)
    if mapping_import is None:
        raise ValueError("Missing the required parameter `mapping_import` when calling `update_mapping_import`")

    resource_path = '/platform/3/auth/mapping/import'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'zone' in params:
        query_params['zone'] = params['zone']
    if 'replace' in params:
        query_params['replace'] = params['replace']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = mapping_import

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def update_mapping_users_rules(self, mapping_users_rules, **kwargs):
    """
    Modify the user mapping rules.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_mapping_users_rules(mapping_users_rules, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param MappingUsersRulesExtended mapping_users_rules: (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    :raises ValueError: if `mapping_users_rules` is None.
    """
    all_params = ['callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_mapping_users_rules" % key
            )
    params = dict(kwargs)

    # verify the required parameter is set (positional, so only None check)
    if mapping_users_rules is None:
        raise ValueError("Missing the required parameter `mapping_users_rules` when calling `update_mapping_users_rules`")

    resource_path = '/platform/1/auth/mapping/users/rules'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = mapping_users_rules

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def update_providers_ads_by_id(self, providers_ads_id_params, providers_ads_id, **kwargs):
    """
    Modify the ADS provider.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_providers_ads_by_id(providers_ads_id_params, providers_ads_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ProvidersAdsIdParams providers_ads_id_params: (required)
    :param str providers_ads_id: Modify the ADS provider. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    :raises ValueError: if a required parameter is None.
    """
    all_params = ['callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_providers_ads_by_id" % key
            )
    params = dict(kwargs)

    # verify the required parameters are set (positional, so only None check)
    if providers_ads_id_params is None:
        raise ValueError("Missing the required parameter `providers_ads_id_params` when calling `update_providers_ads_by_id`")
    if providers_ads_id is None:
        raise ValueError("Missing the required parameter `providers_ads_id` when calling `update_providers_ads_by_id`")

    resource_path = '/platform/3/auth/providers/ads/{ProvidersAdsId}'.replace('{format}', 'json')
    path_params = {'ProvidersAdsId': providers_ads_id}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = providers_ads_id_params

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def update_providers_file_by_id(self, providers_file_id_params, providers_file_id, **kwargs):
    """
    Modify the file provider.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_providers_file_by_id(providers_file_id_params, providers_file_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ProvidersFileIdParams providers_file_id_params: (required)
    :param str providers_file_id: Modify the file provider. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    :raises ValueError: if a required parameter is None.
    """
    all_params = ['callback']
    # Validate keyword arguments directly instead of round-tripping through
    # locals() mutation and six's iteritems (fragile, implementation-specific).
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_providers_file_by_id" % key
            )
    params = dict(kwargs)

    # verify the required parameters are set (positional, so only None check)
    if providers_file_id_params is None:
        raise ValueError("Missing the required parameter `providers_file_id_params` when calling `update_providers_file_by_id`")
    if providers_file_id is None:
        raise ValueError("Missing the required parameter `providers_file_id` when calling `update_providers_file_by_id`")

    resource_path = '/platform/1/auth/providers/file/{ProvidersFileId}'.replace('{format}', 'json')
    path_params = {'ProvidersFileId': providers_file_id}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = providers_file_id_params

    # HTTP header `Accept`; drop it when the client offers no acceptable type.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['basic_auth']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def update_providers_krb5_by_id(self, providers_krb5_id_params, providers_krb5_id, **kwargs):
    """
    Modify the KRB5 provider.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param ProvidersKrb5IdParams providers_krb5_id_params: new provider settings (required)
    :param str providers_krb5_id: id of the KRB5 provider to modify (required)
    :param callback function: invoked with the response for async requests (optional)
    :return: None, or the request thread when called asynchronously
    """
    # Reject any keyword argument this endpoint does not recognize.
    for key in kwargs:
        if key not in ('providers_krb5_id_params', 'providers_krb5_id', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_providers_krb5_by_id" % key
            )
    # Both parameters are required and may not be None.
    if providers_krb5_id_params is None:
        raise ValueError("Missing the required parameter `providers_krb5_id_params` when calling `update_providers_krb5_by_id`")
    if providers_krb5_id is None:
        raise ValueError("Missing the required parameter `providers_krb5_id` when calling `update_providers_krb5_by_id`")
    resource_path = '/platform/3/auth/providers/krb5/{ProvidersKrb5Id}'.replace('{format}', 'json')
    path_params = {'ProvidersKrb5Id': providers_krb5_id}
    header_params = {}
    # HTTP header `Accept` is only sent when a type was negotiated.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    # Authentication setting: HTTP basic auth.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    {},
                                    header_params,
                                    body=providers_krb5_id_params,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def update_providers_ldap_by_id(self, providers_ldap_id_params, providers_ldap_id, **kwargs):
    """
    Modify the LDAP provider.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param ProvidersLdapIdParams providers_ldap_id_params: new provider settings (required)
    :param str providers_ldap_id: id of the LDAP provider to modify (required)
    :param callback function: invoked with the response for async requests (optional)
    :return: None, or the request thread when called asynchronously
    """
    # Reject any keyword argument this endpoint does not recognize.
    for key in kwargs:
        if key not in ('providers_ldap_id_params', 'providers_ldap_id', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_providers_ldap_by_id" % key
            )
    # Both parameters are required and may not be None.
    if providers_ldap_id_params is None:
        raise ValueError("Missing the required parameter `providers_ldap_id_params` when calling `update_providers_ldap_by_id`")
    if providers_ldap_id is None:
        raise ValueError("Missing the required parameter `providers_ldap_id` when calling `update_providers_ldap_by_id`")
    resource_path = '/platform/3/auth/providers/ldap/{ProvidersLdapId}'.replace('{format}', 'json')
    path_params = {'ProvidersLdapId': providers_ldap_id}
    header_params = {}
    # HTTP header `Accept` is only sent when a type was negotiated.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    # Authentication setting: HTTP basic auth.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    {},
                                    header_params,
                                    body=providers_ldap_id_params,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def update_providers_local_by_id(self, providers_local_id_params, providers_local_id, **kwargs):
    """
    Modify the local provider.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param ProvidersLocalIdParams providers_local_id_params: new provider settings (required)
    :param str providers_local_id: id of the local provider to modify (required)
    :param callback function: invoked with the response for async requests (optional)
    :return: None, or the request thread when called asynchronously
    """
    # Reject any keyword argument this endpoint does not recognize.
    for key in kwargs:
        if key not in ('providers_local_id_params', 'providers_local_id', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_providers_local_by_id" % key
            )
    # Both parameters are required and may not be None.
    if providers_local_id_params is None:
        raise ValueError("Missing the required parameter `providers_local_id_params` when calling `update_providers_local_by_id`")
    if providers_local_id is None:
        raise ValueError("Missing the required parameter `providers_local_id` when calling `update_providers_local_by_id`")
    resource_path = '/platform/1/auth/providers/local/{ProvidersLocalId}'.replace('{format}', 'json')
    path_params = {'ProvidersLocalId': providers_local_id}
    header_params = {}
    # HTTP header `Accept` is only sent when a type was negotiated.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    # Authentication setting: HTTP basic auth.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    {},
                                    header_params,
                                    body=providers_local_id_params,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def update_providers_nis_by_id(self, providers_nis_id_params, providers_nis_id, **kwargs):
    """
    Modify the NIS provider.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param ProvidersNisIdParams providers_nis_id_params: new provider settings (required)
    :param str providers_nis_id: id of the NIS provider to modify (required)
    :param callback function: invoked with the response for async requests (optional)
    :return: None, or the request thread when called asynchronously
    """
    # Reject any keyword argument this endpoint does not recognize.
    for key in kwargs:
        if key not in ('providers_nis_id_params', 'providers_nis_id', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_providers_nis_by_id" % key
            )
    # Both parameters are required and may not be None.
    if providers_nis_id_params is None:
        raise ValueError("Missing the required parameter `providers_nis_id_params` when calling `update_providers_nis_by_id`")
    if providers_nis_id is None:
        raise ValueError("Missing the required parameter `providers_nis_id` when calling `update_providers_nis_by_id`")
    resource_path = '/platform/3/auth/providers/nis/{ProvidersNisId}'.replace('{format}', 'json')
    path_params = {'ProvidersNisId': providers_nis_id}
    header_params = {}
    # HTTP header `Accept` is only sent when a type was negotiated.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    # Authentication setting: HTTP basic auth.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    {},
                                    header_params,
                                    body=providers_nis_id_params,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def update_settings_acls(self, settings_acls, **kwargs):
    """
    Modify cluster ACL policy settings.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param SettingsAclsAclPolicySettings settings_acls: new ACL policy settings (required)
    :param callback function: invoked with the response for async requests (optional)
    :return: None, or the request thread when called asynchronously
    """
    # Reject any keyword argument this endpoint does not recognize.
    for key in kwargs:
        if key not in ('settings_acls', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_settings_acls" % key
            )
    # The body parameter is required and may not be None.
    if settings_acls is None:
        raise ValueError("Missing the required parameter `settings_acls` when calling `update_settings_acls`")
    resource_path = '/platform/3/auth/settings/acls'.replace('{format}', 'json')
    header_params = {}
    # HTTP header `Accept` is only sent when a type was negotiated.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    # Authentication setting: HTTP basic auth.
    return self.api_client.call_api(resource_path, 'PUT',
                                    {},
                                    {},
                                    header_params,
                                    body=settings_acls,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def update_settings_global(self, settings_global, **kwargs):
    """
    Modify the global settings.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param SettingsGlobalGlobalSettings settings_global: new global settings (required)
    :param str zone: Zone which contains any per-zone settings.
    :param callback function: invoked with the response for async requests (optional)
    :return: None, or the request thread when called asynchronously
    """
    # Reject any keyword argument this endpoint does not recognize.
    for key in kwargs:
        if key not in ('settings_global', 'zone', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_settings_global" % key
            )
    # The body parameter is required and may not be None.
    if settings_global is None:
        raise ValueError("Missing the required parameter `settings_global` when calling `update_settings_global`")
    resource_path = '/platform/1/auth/settings/global'.replace('{format}', 'json')
    # `zone` is an optional query parameter.
    query_params = {}
    if 'zone' in kwargs:
        query_params['zone'] = kwargs['zone']
    header_params = {}
    # HTTP header `Accept` is only sent when a type was negotiated.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    # Authentication setting: HTTP basic auth.
    return self.api_client.call_api(resource_path, 'PUT',
                                    {},
                                    query_params,
                                    header_params,
                                    body=settings_global,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def update_settings_krb5_defaults(self, settings_krb5_defaults, **kwargs):
    """
    Modify the krb5 settings.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param SettingsKrb5DefaultsKrb5Settings settings_krb5_defaults: new krb5 defaults (required)
    :param callback function: invoked with the response for async requests (optional)
    :return: None, or the request thread when called asynchronously
    """
    # Reject any keyword argument this endpoint does not recognize.
    for key in kwargs:
        if key not in ('settings_krb5_defaults', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_settings_krb5_defaults" % key
            )
    # The body parameter is required and may not be None.
    if settings_krb5_defaults is None:
        raise ValueError("Missing the required parameter `settings_krb5_defaults` when calling `update_settings_krb5_defaults`")
    resource_path = '/platform/1/auth/settings/krb5/defaults'.replace('{format}', 'json')
    header_params = {}
    # HTTP header `Accept` is only sent when a type was negotiated.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    # Authentication setting: HTTP basic auth.
    return self.api_client.call_api(resource_path, 'PUT',
                                    {},
                                    {},
                                    header_params,
                                    body=settings_krb5_defaults,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def update_settings_krb5_domain(self, settings_krb5_domain, settings_krb5_domain_id, **kwargs):
    """
    Modify the krb5 domain settings.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param SettingsKrb5Domain settings_krb5_domain: new domain settings (required)
    :param str settings_krb5_domain_id: id of the krb5 domain to modify (required)
    :param callback function: invoked with the response for async requests (optional)
    :return: None, or the request thread when called asynchronously
    """
    # Reject any keyword argument this endpoint does not recognize.
    for key in kwargs:
        if key not in ('settings_krb5_domain', 'settings_krb5_domain_id', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_settings_krb5_domain" % key
            )
    # Both parameters are required and may not be None.
    if settings_krb5_domain is None:
        raise ValueError("Missing the required parameter `settings_krb5_domain` when calling `update_settings_krb5_domain`")
    if settings_krb5_domain_id is None:
        raise ValueError("Missing the required parameter `settings_krb5_domain_id` when calling `update_settings_krb5_domain`")
    resource_path = '/platform/1/auth/settings/krb5/domains/{SettingsKrb5DomainId}'.replace('{format}', 'json')
    path_params = {'SettingsKrb5DomainId': settings_krb5_domain_id}
    header_params = {}
    # HTTP header `Accept` is only sent when a type was negotiated.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    # Authentication setting: HTTP basic auth.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    {},
                                    header_params,
                                    body=settings_krb5_domain,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def update_settings_krb5_realm(self, settings_krb5_realm, settings_krb5_realm_id, **kwargs):
    """
    Modify the krb5 realm settings.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param SettingsKrb5Realm settings_krb5_realm: new realm settings (required)
    :param str settings_krb5_realm_id: id of the krb5 realm to modify (required)
    :param callback function: invoked with the response for async requests (optional)
    :return: None, or the request thread when called asynchronously
    """
    # Reject any keyword argument this endpoint does not recognize.
    for key in kwargs:
        if key not in ('settings_krb5_realm', 'settings_krb5_realm_id', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_settings_krb5_realm" % key
            )
    # Both parameters are required and may not be None.
    if settings_krb5_realm is None:
        raise ValueError("Missing the required parameter `settings_krb5_realm` when calling `update_settings_krb5_realm`")
    if settings_krb5_realm_id is None:
        raise ValueError("Missing the required parameter `settings_krb5_realm_id` when calling `update_settings_krb5_realm`")
    resource_path = '/platform/1/auth/settings/krb5/realms/{SettingsKrb5RealmId}'.replace('{format}', 'json')
    path_params = {'SettingsKrb5RealmId': settings_krb5_realm_id}
    header_params = {}
    # HTTP header `Accept` is only sent when a type was negotiated.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    # Authentication setting: HTTP basic auth.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    {},
                                    header_params,
                                    body=settings_krb5_realm,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def update_settings_mapping(self, settings_mapping, **kwargs):
    """
    Modify the mapping settings.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback receives the response.

    :param SettingsMappingMappingSettings settings_mapping: new mapping settings (required)
    :param str zone: Access zone which contains mapping settings.
    :param callback function: invoked with the response for async requests (optional)
    :return: None, or the request thread when called asynchronously
    """
    # Reject any keyword argument this endpoint does not recognize.
    for key in kwargs:
        if key not in ('settings_mapping', 'zone', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_settings_mapping" % key
            )
    # The body parameter is required and may not be None.
    if settings_mapping is None:
        raise ValueError("Missing the required parameter `settings_mapping` when calling `update_settings_mapping`")
    resource_path = '/platform/1/auth/settings/mapping'.replace('{format}', 'json')
    # `zone` is an optional query parameter.
    query_params = {}
    if 'zone' in kwargs:
        query_params['zone'] = kwargs['zone']
    header_params = {}
    # HTTP header `Accept` is only sent when a type was negotiated.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
    # Authentication setting: HTTP basic auth.
    return self.api_client.call_api(resource_path, 'PUT',
                                    {},
                                    query_params,
                                    header_params,
                                    body=settings_mapping,
                                    post_params=[],
                                    files={},
                                    response_type=None,
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
| 39.329658 | 236 | 0.546373 | 26,553 | 271,060 | 5.353595 | 0.016345 | 0.043052 | 0.023594 | 0.022722 | 0.944328 | 0.934655 | 0.925095 | 0.912918 | 0.90466 | 0.896246 | 0 | 0.002499 | 0.371062 | 271,060 | 6,891 | 237 | 39.335365 | 0.831347 | 0.25933 | 0 | 0.852675 | 0 | 0.000549 | 0.187391 | 0.042106 | 0 | 0 | 0 | 0 | 0 | 1 | 0.023594 | false | 0 | 0.004115 | 0 | 0.051303 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9e6427603d216d64b3550333b5481bb214a7d8e4 | 85 | py | Python | bluetail/models/__init__.py | KhadijaMahanga/bluetail | 9614917a07a63d290d7fd7f68c6c488efbdcd2b0 | [
"MIT"
] | 10 | 2021-04-08T06:39:50.000Z | 2022-03-19T15:15:11.000Z | bluetail/models/__init__.py | KhadijaMahanga/bluetail | 9614917a07a63d290d7fd7f68c6c488efbdcd2b0 | [
"MIT"
] | 20 | 2020-06-08T11:23:11.000Z | 2021-06-08T21:51:14.000Z | bluetail/models/__init__.py | KhadijaMahanga/bluetail | 9614917a07a63d290d7fd7f68c6c488efbdcd2b0 | [
"MIT"
] | 4 | 2020-10-09T15:02:10.000Z | 2021-09-16T16:57:25.000Z | from .bods_models import *
from .ocds_models import *
from .bluetail_models import *
| 21.25 | 30 | 0.788235 | 12 | 85 | 5.333333 | 0.5 | 0.5625 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.141176 | 85 | 3 | 31 | 28.333333 | 0.876712 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
9e7c914583b385c718997b43a636fff88b8c8bac | 94 | py | Python | ansys/mapdl/core/_commands/reduced/__init__.py | da1910/pymapdl | 305b70b30e61a78011e974ff4cb409ee21f89e13 | [
"MIT"
] | 194 | 2016-10-21T08:46:41.000Z | 2021-01-06T20:39:23.000Z | ansys/mapdl/core/_commands/reduced/__init__.py | da1910/pymapdl | 305b70b30e61a78011e974ff4cb409ee21f89e13 | [
"MIT"
] | 463 | 2021-01-12T14:07:38.000Z | 2022-03-31T22:42:25.000Z | ansys/mapdl/core/_commands/reduced/__init__.py | da1910/pymapdl | 305b70b30e61a78011e974ff4cb409ee21f89e13 | [
"MIT"
] | 66 | 2016-11-21T04:26:08.000Z | 2020-12-28T09:27:27.000Z | from . import generation
from . import preparation
from . import setup
from . import use_pass
| 18.8 | 25 | 0.787234 | 13 | 94 | 5.615385 | 0.538462 | 0.547945 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.170213 | 94 | 4 | 26 | 23.5 | 0.935897 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.25 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
7b65313deea5592dd893a89f74e1d5dd409737a1 | 285,049 | py | Python | tests/image_test.py | Synchronicity89/CV | 14f96f5ac0f11922ba4c6cbb83c4c301f6ce0eae | [
"Apache-2.0"
] | null | null | null | tests/image_test.py | Synchronicity89/CV | 14f96f5ac0f11922ba4c6cbb83c4c301f6ce0eae | [
"Apache-2.0"
] | null | null | null | tests/image_test.py | Synchronicity89/CV | 14f96f5ac0f11922ba4c6cbb83c4c301f6ce0eae | [
"Apache-2.0"
] | null | null | null | """
Copyright(c), Google, LLC (Andrew Ferlitsch)
Copyright(c), Virtualdvid (David Molina)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sys
import time
import unittest
from shutil import copy, rmtree
import cv2
import numpy as np
import pytest
from gapcv.vision import Image, Images
class MyTest(unittest.TestCase):
""" My Test """
def setup_class(self):
    """Per-class setup hook; no shared fixtures are needed."""
def teardown_class(self):
    """Per-class teardown hook; nothing to clean up."""
### Images
def test_001(self):
    """Images constructor with no arguments yields the documented defaults."""
    images = Images()
    # (actual, expected) pairs checked in the same order as before.
    checks = [
        (images.images, None),
        (images.labels, None),
        (len(images), 0),
        (images.count, 0),
        (images.dir, './'),
        (images.name, 'unnamed'),
        (images.time, 0),
        (images.elapsed, "00:00:00"),
        (images.fail, 0),
        (images.errors, []),
        (images.dtype, np.float32),
        (images.shape, (0,)),
        (images.classes, None),
        (images.author, ''),
        (images.src, ''),
        (images.desc, ''),
        (images.license, ''),
    ]
    for actual, expected in checks:
        self.assertEqual(actual, expected)
def test_002(self):
    """Images constructor accepts the 'store' config option without raising."""
    Images(config=['store'])
def test_003(self):
    """Images constructor normalizes the _dir argument and rejects non-strings."""
    self.assertEqual(Images(_dir='./tmp').dir, './tmp/')
    # A numeric _dir must be rejected.
    with pytest.raises(TypeError):
        Images(_dir=2)
def test_004(self):
    """The dir property normalizes assigned paths and rejects non-strings."""
    images = Images()
    # Each assignment is normalized to end with a trailing slash.
    for assigned, normalized in (('./tmp', './tmp/'), ('./tmp2/', './tmp2/')):
        images.dir = assigned
        self.assertEqual(images.dir, normalized)
    with pytest.raises(TypeError):
        images.dir = 2
    # Remove the directories the setter created.
    rmtree('tmp')
    rmtree('tmp2')
def test_005(self):
    """The name constructor argument is stored; non-strings are rejected."""
    self.assertEqual(Images(name='foo').name, 'foo')
    with pytest.raises(TypeError):
        Images(name=2)
def test_006(self):
    """The name property is settable with a string and rejects non-strings."""
    instance = Images()
    instance.name = 'foo'
    self.assertEqual(instance.name, 'foo')
    with pytest.raises(TypeError):
        instance.name = 2
def test_007(self):
    """Exhaustive validation of the config constructor argument."""
    # None and an empty list are both acceptable.
    Images(config=None)
    Images(config=[])
    # Non-list config values raise TypeError.
    for bad in (7, '7'):
        with pytest.raises(TypeError):
            Images(config=bad)
    # Unrecognized entries raise AttributeError.
    for bad in ([7], ['foo']):
        with pytest.raises(AttributeError):
            Images(config=bad)
    # Recognized single-word settings construct cleanly.
    for setting in ('flat', 'flatten', 'gray', 'grayscale',
                    'store', 'stream', 'header'):
        Images(config=[setting])
    # dtype settings map to the matching numpy type.
    for setting, dtype in (('uint8', np.uint8), ('uint16', np.uint16),
                           ('float16', np.float16), ('float32', np.float32),
                           ('float64', np.float64)):
        self.assertEqual(Images(config=[setting]).dtype, dtype)
    # Empty metadata values default to the empty string.
    self.assertEqual(Images(config=['license=']).license, '')
    self.assertEqual(Images(config=['src=']).src, '')
    self.assertEqual(Images(config=['author=']).author, '')
    self.assertEqual(Images(config=['desc=']).desc, '')
    # Malformed or non-positive resize specifications raise AttributeError.
    for bad in ('resize=', 'resize=()', 'resize=(1,)', 'resize=(0,3)',
                'resize=(3,0)', 'resize=(-1,3)', 'resize=(3,-1)'):
        with pytest.raises(AttributeError):
            Images(config=[bad])
    Images(config=['resize=(10,20)'])
    # Invalid normalization settings raise; the three named modes succeed.
    for bad in ('norm=', 'normalization=', 'norm=2'):
        with pytest.raises(AttributeError):
            Images(config=[bad])
    for good in ('norm=pos', 'norm=zero', 'norm=std'):
        Images(config=[good])
    # Column indices must be non-negative integers.
    for column in ('image_col', 'label_col'):
        for suffix in ('=', '=A', '=-1'):
            with pytest.raises(AttributeError):
                Images(config=[column + suffix])
        Images(config=[column + '=0'])
    # Separator and key settings require a non-empty value.
    with pytest.raises(AttributeError):
        Images(config=['sep='])
    Images(config=['sep=A'])
    with pytest.raises(AttributeError):
        Images(config=['image_key='])
    Images(config=['image_key=A'])
    with pytest.raises(AttributeError):
        Images(config=['label_key='])
    Images(config=['label_key=B'])
def test_008(self):
    """ Images Constructor - no images, labels argument """
    # Accepted label forms: scalar int, list of ints, numpy int array,
    # single string, list of strings.
    for labels in (1, [1], np.asarray([1]), 'cats', ['cats', 'dogs']):
        images = Images(labels=labels)
    # Rejected: scalar float, empty list, float elements in a list or array.
    with pytest.raises(TypeError):
        images = Images(labels=3.2)
    with pytest.raises(AttributeError):
        images = Images(labels=[])
    with pytest.raises(TypeError):
        images = Images(labels=[3.2])
    with pytest.raises(TypeError):
        images = Images(labels=np.asarray([1.6]))
def dummy(self):
    """ No-op placeholder used as an event-handler callback in tests. """
    return None
def test_009(self):
    """ Images Constructor - no images, ehandler argument """
    # A bare callable or a (callable, extra-args) tuple is accepted.
    images = Images(ehandler=self.dummy)
    images = Images(ehandler=(self.dummy, 6))
    # A non-callable is rejected, whether bare or as the tuple's first item.
    for bad_handler in (1, (1, 2)):
        with pytest.raises(TypeError):
            images = Images(ehandler=bad_handler)
def test_010(self):
    """ Images - directory - bad arguments """
    # A nonexistent directory and a plain (non-directory) file both raise.
    for source in ('noexist_dir', 'func_test.py'):
        with pytest.raises(OSError):
            images = Images('foo', source)
def test_011(self):
    """ Images - CSV - bad arguments """
    # nonexistent local and remote CSV sources raise OSError
    with pytest.raises(OSError):
        images = Images('foo', 'noexist.csv', config=['image_col=0', 'label_col=0'])
    with pytest.raises(OSError):
        images = Images('foo', 'http://noexist.csv', config=['image_col=0', 'label_col=0'])
    with pytest.raises(OSError):
        images = Images('foo', 'https://noexist.csv', config=['image_col=0', 'label_col=0'])
    # empty CSV file: invalid column settings raise AttributeError
    with open('files/empty.csv', 'w'):
        pass
    with pytest.raises(AttributeError):
        images = Images('foo', 'files/empty.csv', config=['image_col=-1', 'label_col=0'])
    with pytest.raises(AttributeError):
        images = Images('foo', 'files/empty.csv', config=['image_col=A', 'label_col=0'])
    # BUG FIX: this case duplicated 'image_col=A' above; it now tests the
    # empty 'image_col=' setting, mirroring the 'label_col=' case below
    # (the constructor rejects empty 'image_col=' per test_007).
    with pytest.raises(AttributeError):
        images = Images('foo', 'files/empty.csv', config=['image_col=', 'label_col=0'])
    with pytest.raises(AttributeError):
        images = Images('foo', 'files/empty.csv', config=['label_col=-1', 'image_col=0'])
    with pytest.raises(AttributeError):
        images = Images('foo', 'files/empty.csv', config=['label_col=A', 'image_col=0'])
    with pytest.raises(AttributeError):
        images = Images('foo', 'files/empty.csv', config=['label_col=', 'image_col=0'])
    # one of the two required column settings missing
    with pytest.raises(AttributeError):
        images = Images('foo', 'files/empty.csv', config=['image_col=0'])
    with pytest.raises(AttributeError):
        images = Images('foo', 'files/empty.csv', config=['label_col=0'])
    # image and label column may not be the same
    with pytest.raises(ValueError):
        images = Images('foo', 'files/empty.csv', config=['label_col=0', 'image_col=0'])
    # out-of-range column index, no header row
    with open('files/empty.csv', 'w') as f:
        f.write('1,2\n')
    with pytest.raises(IndexError):
        images = Images('foo', 'files/empty.csv', config=['label_col=2', 'image_col=0'])
    with pytest.raises(IndexError):
        images = Images('foo', 'files/empty.csv', config=['label_col=0', 'image_col=2'])
    # out-of-range column index, with header row
    with open('files/empty.csv', 'w') as f:
        f.write('header,header\n')
        f.write('1,2\n')
    with pytest.raises(IndexError):
        images = Images('foo', 'files/empty.csv',
                        config=['label_col=2', 'image_col=0', 'header'])
    with pytest.raises(IndexError):
        images = Images('foo', 'files/empty.csv',
                        config=['label_col=0', 'image_col=2', 'header'])
    os.remove('files/empty.csv')
def test_012(self):
    """ Images - JSON - bad arguments """
    # nonexistent local and remote JSON sources
    with pytest.raises(OSError):
        images = Images('foo', 'noexist.json', config=['image_key=image', 'label_key=label'])
    with pytest.raises(OSError):
        images = Images('foo', 'http://noexist.json',
                        config=['image_key=image', 'label_key=label'])
    with pytest.raises(OSError):
        images = Images('foo', 'https://noexist.json',
                        config=['image_key=image', 'label_key=label'])
    # empty file: both key settings are required and must differ
    with open('files/empty.json', 'w'):
        pass
    with pytest.raises(AttributeError):
        images = Images('foo', 'files/empty.json', config=[])
    with pytest.raises(AttributeError):
        images = Images('foo', 'files/empty.json', config=['image_key=image'])
    with pytest.raises(AttributeError):
        images = Images('foo', 'files/empty.json', config=['label_key=key'])
    with pytest.raises(ValueError):
        images = Images('foo', 'files/empty.json',
                        config=['image_key=image', 'label_key=image'])
    # malformed JSON content
    with open('files/empty.json', 'w') as f:
        f.write('{"0"\n')
    with pytest.raises(OSError):
        images = Images('foo', 'files/empty.json',
                        config=['image_key=image', 'label_key=label'])
    # valid JSON where a configured key is missing from the entries
    with open('files/test.json', 'w') as f:
        f.write("[")
        f.write('{"label": 0, "image": "files/1.jpg"},\n')
        f.write('{"label": 0, "image": "files/2.jpg"},\n')
        f.write('{"label": 0, "image": "files/3.jpg"}\n')
        f.write("]")
    with pytest.raises(IndexError):
        images = Images('foo', 'files/test.json', config=['image_key=image', 'label_key=foo'])
    with pytest.raises(IndexError):
        images = Images('foo', 'files/test.json', config=['image_key=foo', 'label_key=label'])
    os.remove('files/test.json')
    os.remove('files/empty.json')
def test_013(self):
    """ Images - list - bad arguments """
    # an empty image list is rejected outright
    with pytest.raises(TypeError):
        images = Images('foo', [])
    # label count must match image count
    with pytest.raises(AttributeError):
        images = Images('foo', ['a'], [1, 2])
def test_014(self):
    """ Images - memory - bad arguments """
    # an empty in-memory array is rejected outright
    with pytest.raises(TypeError):
        images = Images('foo', np.asarray([]))
    # label count must match image count
    with pytest.raises(AttributeError):
        images = Images('foo', np.asarray([[1]]), [1, 2])
def test_015(self):
    """ Images - directory - no images """
    # Start from a guaranteed-empty directory.
    if os.path.isdir('files/empty'):
        rmtree('files/empty')
    os.mkdir('files/empty')
    # Empty directory: collection is empty, attributes carry defaults
    # (default shape is 128x128 per the assertions below).
    images = Images('foo', 'files/empty')
    self.assertEqual(len(images), 0)
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.classes, {})
    self.assertEqual(images.labels, [])
    self.assertEqual(images.images, [])
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 0)
    # A non-image file in the directory leaves the collection empty.
    f = open('files/empty/foo.txt', 'w+')
    f.close()
    images = Images('foo', 'files/empty')
    self.assertEqual(len(images), 0)
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.classes, {})
    self.assertEqual(len(images.labels), 0)
    self.assertEqual(images.images, [])
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 0)
    # Empty subdirectories contribute no classes or images.
    if not os.path.isdir('files/empty/tmp1'):
        os.mkdir('files/empty/tmp1')
    if not os.path.isdir('files/empty/tmp2'):
        os.mkdir('files/empty/tmp2')
    images = Images('foo', 'files/empty')
    self.assertEqual(len(images), 0)
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.classes, {})
    self.assertEqual(len(images.labels), 0)
    self.assertEqual(images.images, [])
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 0)
    # A dot-prefixed (hidden) subdirectory is skipped, even with a file in it.
    if not os.path.isdir('files/empty/.tmp'):
        os.mkdir('files/empty/.tmp')
    f = open('files/empty/.tmp/1.jpg', 'w+')
    f.close()
    images = Images('foo', 'files/empty')
    self.assertEqual(len(images), 0)
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.classes, {})
    self.assertEqual(len(images.labels), 0)
    self.assertEqual(images.images, [])
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 0)
    # Storing an empty collection still writes <name>.h5, reloadable by name.
    images = Images('foo', 'files/empty', config=['store'])
    self.assertTrue(os.path.isfile("foo.h5"))
    images = Images()
    images.load('foo')
    self.assertEqual(images.name, 'foo')
    self.assertEqual(len(images), 0)
    self.assertEqual(images.classes, {})
    self.assertEqual(images.count, 0)
    self.assertEqual(images.images, [])
    self.assertEqual(len(images.labels), 0)
    os.remove('foo.h5')
    # line 57 only works if name=None
    # With name=None the store file is named after the source directory.
    images = Images(None, 'files/empty', config=['store'])
    self.assertTrue(os.path.isfile("files/empty.h5"))
    images = Images()
    images.load('files/empty')
    self.assertEqual(images.name, 'files/empty')
    self.assertEqual(len(images), 0)
    self.assertEqual(images.classes, {})
    self.assertEqual(images.count, 0)
    self.assertEqual(images.images, [])
    self.assertEqual(len(images.labels), 0)
    os.remove('files/empty.h5')
    # With no name at all the store defaults to 'unnamed'.
    # NOTE(review): unnamed.h5 is intentionally left on disk — test_016
    # appears to rely on it existing; confirm before adding cleanup here.
    images = Images(images='files/empty', config=['store'])
    self.assertTrue(os.path.isfile("unnamed.h5"))
    images = Images()
    images.load('unnamed')
    self.assertEqual(images.name, 'unnamed')
    self.assertEqual(len(images), 0)
    self.assertEqual(images.classes, {})
    self.assertEqual(images.count, 0)
    self.assertEqual(images.images, [])
    self.assertEqual(len(images.labels), 0)
    # load() with no argument defaults to the 'unnamed' store.
    images = Images()
    images.load()
    self.assertEqual(images.name, 'unnamed')
    self.assertEqual(len(images), 0)
    self.assertEqual(images.classes, {})
    self.assertEqual(images.count, 0)
    self.assertEqual(images.images, [])
    self.assertEqual(len(images.labels), 0)
    rmtree('files/empty')
def test_016(self):
    """ Images - load - bad arguments """
    images = Images()
    # name=None is a ValueError; a non-string name is a TypeError
    with pytest.raises(ValueError):
        images.load(None)
    with pytest.raises(TypeError):
        images.load(1)
    # _dir must be a string as well
    with pytest.raises(TypeError):
        images = Images(_dir=None)
    # a valid directory argument is retained on the instance
    images = Images()
    images.load(_dir='./')
    self.assertEqual(images.dir, './')
    os.remove('unnamed.h5')
def test_017(self):
    """ Images - directory - bad images """
    # build a class subdirectory holding a single unreadable (empty) jpg
    if os.path.isdir('files/bad/tmp1'):
        rmtree('files/bad')
    os.mkdir('files/bad')
    os.mkdir('files/bad/tmp1')
    open('files/bad/tmp1/1.jpg', 'w+').close()
    images = Images('foo', 'files/bad')
    # the bad image registers as a failure: the class exists but holds
    # no loaded images or labels, and one error is recorded
    self.assertEqual(images.fail, 1)
    self.assertEqual(images.classes, {'tmp1': 0})
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(len(images.labels[0]), 0)
    self.assertEqual(len(images), 1)
    self.assertEqual(len(images[0]), 0)
    self.assertEqual(images.count, 0)
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(len(images.errors), 1)
    rmtree('files/bad')
def test_018(self):
    """ Images - attributes """
    if os.path.isdir('files/empty'):
        rmtree('files/empty')
    os.mkdir('files/empty')
    # store with metadata attributes, then reload and verify each one
    images = Images('foo', 'files/empty',
                    config=['store', 'author=andy', 'license=2.0', 'desc=any', 'src=mysrc'])
    images = Images()
    images.load('foo')
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.author, 'andy')
    self.assertEqual(images.license, '2.0')
    self.assertEqual(images.desc, 'any')
    self.assertEqual(images.src, 'mysrc')
    # no images were processed, so no collection time was recorded
    self.assertEqual(images.time, 0)
    os.rmdir('files/empty')
    # BUG FIX: remove the stored HDF5 file; the original leaked 'foo.h5',
    # unlike the sibling tests which clean up after storing.
    os.remove('foo.h5')
def test_019(self):
    """ Images - directory - single class """
    # One class subdirectory with two valid images; exercise the four
    # combinations of store/stream with and without a bad image, plus a
    # non-square resize, verifying the reloaded state after each run.
    if os.path.isdir('files/root'):
        rmtree('files/root')
    os.mkdir('files/root')
    os.mkdir('files/root/tmp1')
    copy('files/1.jpg', 'files/root/tmp1')
    copy('files/2.jpg', 'files/root/tmp1')
    # store: both images collected into one class batch of default shape
    images = Images('foo', 'files/root', config=['store'])
    self.assertEqual(images.classes, {'tmp1': 0})
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(images.count, 2)
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.labels[0][1], 0)
    self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
    self.assertTrue(images.time > 0)
    # load, store
    images = Images()
    images.load('foo')
    self.assertEqual(images.classes, {'tmp1': 0})
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(images.count, 2)
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.labels[0][1], 0)
    self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
    self.assertTrue(images.time > 0)
    # stream: counts/metadata available, in-memory arrays not asserted
    images = Images('foo', 'files/root', config=['stream'])
    self.assertEqual(images.classes, {'tmp1': 0})
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(images.count, 2)
    self.assertTrue(images.time > 0)
    # load, stream
    images = Images()
    images.load('foo')
    self.assertEqual(images.classes, {'tmp1': 0})
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(images.count, 2)
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.labels[0][1], 0)
    self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
    self.assertTrue(images.time > 0)
    # error: add an unreadable (empty) jpg alongside the two good images
    f = open('files/root/tmp1/bad.jpg', 'w+')
    f.close()
    images = Images('foo', 'files/root', config=['store'])
    self.assertEqual(images.classes, {'tmp1': 0})
    self.assertEqual(images.fail, 1)
    self.assertEqual(len(images.errors), 1)
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(images.count, 2)
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.labels[0][1], 0)
    self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
    self.assertTrue(images.time > 0)
    # load, error, store: the failure survives the store/load round trip
    images = Images()
    images.load('foo')
    self.assertEqual(images.classes, {'tmp1': 0})
    self.assertEqual(images.fail, 1)
    self.assertEqual(len(images.errors), 1)
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(images.count, 2)
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.labels[0][1], 0)
    self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
    self.assertTrue(images.time > 0)
    # error, stream
    images = Images('foo', 'files/root', config=['stream'])
    self.assertEqual(images.classes, {'tmp1': 0})
    self.assertEqual(images.fail, 1)
    self.assertEqual(len(images.errors), 1)
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(images.count, 2)
    self.assertTrue(images.time > 0)
    # load, error, stream
    images = Images()
    images.load('foo')
    self.assertEqual(images.classes, {'tmp1': 0})
    self.assertEqual(images.fail, 1)
    self.assertEqual(len(images.errors), 1)
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(images.count, 2)
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.labels[0][1], 0)
    self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
    self.assertTrue(images.time > 0)
    # stream height != width
    images = Images('foo', 'files/root', config=['stream', 'resize=(50,40)'])
    self.assertEqual(images.classes, {'tmp1': 0})
    self.assertEqual(images.fail, 1)
    self.assertEqual(images.shape, (50, 40))
    self.assertEqual(images.count, 2)
    self.assertTrue(images.time > 0)
    # load, stream, height != width
    images = Images()
    images.load('foo')
    self.assertEqual(images.classes, {'tmp1': 0})
    self.assertEqual(images.fail, 1)
    self.assertEqual(images.shape, (50, 40))
    self.assertEqual(images.count, 2)
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.labels[0][1], 0)
    self.assertEqual(images.images[0].shape, (2, 50, 40, 3))
    self.assertTrue(images.time > 0)
    rmtree('files/root')
    os.remove('foo.h5')
def test_020(self):
    """ Images - directory - shape on flatten and resize """
    if os.path.isdir('files/root'):
        rmtree('files/root')
    os.mkdir('files/root')
    os.mkdir('files/root/tmp1')
    for src in ('files/1.jpg', 'files/2.jpg'):
        copy(src, 'files/root/tmp1')
    # (config, reported shape, flattened per-image shape):
    # 50x50x3 -> 7500, 30x50x3 -> 4500, 30x50 gray -> 1500
    cases = [
        (['store', 'resize=(50,50)', 'flatten'], (50, 50), (7500,)),
        (['store', 'resize=(30,50)', 'flatten'], (30, 50), (4500,)),
        (['store', 'resize=(30,50)', 'flatten', 'gray'], (30, 50), (1500,)),
    ]
    for config, shape, flat_shape in cases:
        images = Images('foo', 'files/root', config=config)
        self.assertEqual(images.shape, shape)
        self.assertEqual(images.images[0][0].shape, flat_shape)
    rmtree('files/root')
    os.remove('foo.h5')
def test_021(self):
    """ Images - directory - multi class """
    if os.path.isdir('files/root'):
        rmtree('files/root')
    os.mkdir('files/root')
    os.mkdir('files/root/tmp1')
    os.mkdir('files/root/tmp2')
    copy('files/1.jpg', 'files/root/tmp1')
    copy('files/2.jpg', 'files/root/tmp1')
    copy('files/3.jpg', 'files/root/tmp2')
    # two passes over the same tree: color (N,H,W,C) batches, then
    # gray + flat (N,H*W) batches; class/label layout is identical
    cases = [
        (['store', 'resize=(50,50)'], (2, 50, 50, 3), (1, 50, 50, 3)),
        (['store', 'resize=(50,50)', 'gray', 'flat'], (2, 2500), (1, 2500)),
    ]
    for config, batch0_shape, batch1_shape in cases:
        images = Images('foo', 'files/root', config=config)
        self.assertEqual(images.classes, {'tmp1': 0, 'tmp2': 1})
        self.assertEqual(images.fail, 0)
        self.assertEqual(images.errors, [])
        self.assertEqual(images.shape, (50, 50))
        self.assertEqual(images.count, 3)
        self.assertEqual(len(images.images), 2)
        self.assertEqual(len(images.labels), 2)
        self.assertEqual(images.labels[0][0], 0)
        self.assertEqual(images.labels[0][1], 0)
        self.assertEqual(images.labels[1][0], 1)
        self.assertEqual(images.images[0].shape, batch0_shape)
        self.assertEqual(images.images[1].shape, batch1_shape)
    rmtree('files/root')
    os.remove('foo.h5')
def test_022(self):
    """ Images - bad dir, free h5 file """
    # a nonexistent source directory raises even with 'store', but the
    # h5 file was already created and must be cleaned up afterwards
    with pytest.raises(OSError):
        images = Images('foo', 'files/nodir',
                        config=['store', 'resize=(50,50)', 'gray', 'flat'])
    os.remove('foo.h5')
def test_023(self):
    """ Images - memory - no images """
    # an empty array with a label yields an empty collection, but the
    # label still registers as class '1'
    images = Images('foo', np.asarray([]), 1)
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 0)
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(len(images.images), 0)
    self.assertEqual(len(images.labels), 0)
    self.assertEqual(images.classes, {'1': 0})
def test_024(self):
    """ Images - memory - 1D - no store """
    # single image - same size as resize
    memory = np.asarray([[1]])
    images = Images('foo', memory, 1, config=['resize=(1,1)'])
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 1)
    self.assertEqual(images.shape, (1, 1))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'1': 0})
    self.assertEqual(images.labels[0][0], 0)
    # flattened to unflattened
    # (a 2500-element vector is accepted and reshaped to 50x50)
    a = cv2.imread('files/1.jpg', cv2.IMREAD_GRAYSCALE)
    b = cv2.resize(a, (50, 50), interpolation=cv2.INTER_AREA).flatten()
    images = Images('foo', [b], 1, config=['resize=(50,50)'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 1)
    self.assertEqual(images.shape, (50, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'1': 0})
    self.assertEqual(images.labels[0][0], 0)
    # multiple flattened images
    c = np.asarray([b, b, b])
    images = Images('foo', c, 1, config=['resize=(50,50)'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 3)
    self.assertEqual(images.shape, (50, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(len(images.images[0]), 3)
    self.assertEqual(len(images.labels[0]), 3)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.classes, {'1': 0})
    # a larger batch (1000 copies) to exercise bulk in-memory loading
    d = np.asarray([b for _ in range(1000)])
    images = Images('foo', d, 1, config=['resize=(50,50)'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 1000)
    self.assertEqual(images.shape, (50, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(len(images.images[0]), 1000)
    self.assertEqual(len(images.labels[0]), 1000)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.classes, {'1': 0})
    self.assertTrue(images.time > 0)
    self.assertEqual(images.images[0][0].shape, (50, 50))
    # multiple flattened images - flatten
    # (flat keeps the per-image vector: 30*50 = 1500)
    c = np.asarray([b, b, b])
    images = Images('foo', c, 1, config=['resize=(30,50)', 'flat'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 3)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(len(images.images[0]), 3)
    self.assertEqual(len(images.labels[0]), 3)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.classes, {'1': 0})
    self.assertEqual(images.images[0][0].shape, (1500,))
    # multiple flattened images - list is one value
    c = np.asarray([b, b, b])
    images = Images('foo', c, [0, 0, 0], config=['resize=(30,50)'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 3)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(len(images.images[0]), 3)
    self.assertEqual(len(images.labels[0]), 3)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.classes, {'0': 0})
    self.assertEqual(images.images[0][0].shape, (30, 50))
    # multiple flattened images - list different values
    # (labels [0,1,0,1,1] split the batch into classes of size 2 and 3)
    c = np.asarray([b, b, b, b, b])
    images = Images('foo', c, [0, 1, 0, 1, 1], config=['resize=(30,50)'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 5)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    self.assertEqual(len(images.images[0]), 2)
    self.assertEqual(len(images.labels[0]), 2)
    self.assertEqual(len(images.images[1]), 3)
    self.assertEqual(len(images.labels[1]), 3)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.labels[1][0], 1)
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    self.assertEqual(images.images[0][0].shape, (30, 50))
    # gray
    # (grayscale source is expanded to 3 channels unless 'gray' is set)
    c = np.asarray([a, a, a, a, a])
    images = Images('foo', c, [0, 1, 0, 1, 1], config=['resize=(30,50)'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 5)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    self.assertEqual(images.images[0][0].shape, (30, 50, 3))
    images = Images('foo', c, [0, 1, 0, 1, 1], config=['resize=(30,50)', 'flat'])
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    self.assertEqual(images.images[0][0].shape, (4500,))
    images = Images('foo', c, [0, 1, 0, 1, 1], config=['resize=(30,50)', 'flat', 'gray'])
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    self.assertEqual(images.images[0][0].shape, (1500,))
def test_025(self):
    """ Images - memory - 2D - no store """
    color = cv2.imread('files/1.jpg', cv2.IMREAD_COLOR)
    batch = np.asarray([color, color, color, color, color])
    labels = [0, 1, 0, 1, 1]
    # color, no flatten: per-image shape keeps the channel axis
    images = Images('foo', batch, labels, config=['resize=(30,50)'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 5)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    self.assertEqual(images.images[0][0].shape, (30, 50, 3))
    # flattened color: 30*50*3 = 4500 values per image
    images = Images('foo', batch, labels, config=['resize=(30,50)', 'flat'])
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    self.assertEqual(images.images[0][0].shape, (4500,))
    # flattened grayscale: 30*50 = 1500 values per image
    images = Images('foo', batch, labels, config=['resize=(30,50)', 'flat', 'gray'])
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    self.assertEqual(images.images[0][0].shape, (1500,))
def test_026(self):
    """ Images - memory - 3D - no store """
    good = cv2.imread('files/1.jpg', cv2.IMREAD_COLOR)
    bad = np.asarray([['a']])
    # a single valid color image loads cleanly
    images = Images('foo', np.asarray([good]), 1, config=['resize=(30,50)'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 1)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'1': 0})
    self.assertEqual(images.images[0][0].shape, (30, 50, 3))
    # a non-numeric "image" is recorded as a failure, not loaded
    images = Images('foo', np.asarray([bad]), 1, config=['resize=(30,50)'])
    self.assertEqual(images.fail, 1)
    self.assertEqual(len(images.errors), 1)
    self.assertEqual(images.count, 0)
    self.assertEqual(len(images.images), 0)
    self.assertEqual(len(images.labels), 0)
    self.assertEqual(images.classes, {'1': 0})
    # a None entry is likewise a failure
    images = Images('foo', np.asarray([None]), 1, config=['resize=(30,50)'])
    self.assertEqual(images.fail, 1)
    self.assertEqual(len(images.errors), 1)
    self.assertEqual(images.count, 0)
    self.assertEqual(len(images.images), 0)
    self.assertEqual(len(images.labels), 0)
    self.assertEqual(images.classes, {'1': 0})
    # mixed batch: two failures recorded, one image still loads
    images = Images('foo', np.asarray([None, good, bad]), 1, config=['resize=(30,50)'])
    self.assertEqual(images.fail, 2)
    self.assertEqual(len(images.errors), 2)
    self.assertEqual(images.count, 1)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'1': 0})
    self.assertEqual(images.images[0][0].shape, (30, 50, 3))
def test_027(self):
    """ Images - memory - store """
    # Store in-memory batches to HDF5 and verify state both after the
    # collection pass and again after reloading from the store.
    # one class
    a = cv2.imread('files/1.jpg', cv2.IMREAD_COLOR)
    c = np.asarray([a, a, a, a, a])
    images = Images('foo', c, 0, config=['resize=(30,50)', 'store'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 5)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'0': 0})
    self.assertEqual(images.images[0][0].shape, (30, 50, 3))
    self.assertEqual(images.labels[0][0], 0)
    # one class, load: identical state after the round trip
    images = Images()
    images.load('foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 5)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'0': 0})
    self.assertEqual(images.images[0][0].shape, (30, 50, 3))
    self.assertEqual(images.labels[0][0], 0)
    # multi-class: labels [0,1,0,1,1] split the batch into two classes
    images = Images('foo', c, [0, 1, 0, 1, 1], config=['resize=(30,50)', 'store'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 5)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    self.assertEqual(images.images[0][0].shape, (30, 50, 3))
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.labels[1][0], 1)
    # multi class, load
    images = Images()
    images.load('foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 5)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    self.assertEqual(images.images[0][0].shape, (30, 50, 3))
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.labels[1][0], 1)
    os.remove('foo.h5')
def test_028(self):
    """ Images - memory - stream """
    # In stream mode only counts/metadata are asserted after collection;
    # the full image/label arrays are asserted after reloading the store.
    # one class, stream
    a = cv2.imread('files/1.jpg', cv2.IMREAD_COLOR)
    c = np.asarray([a])
    images = Images('foo', c, 0, config=['resize=(30,50)', 'stream'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 1)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(images.classes, {'0': 0})
    # one class, stream, load
    images = Images()
    images.load('foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 1)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'0': 0})
    self.assertEqual(images.images[0][0].shape, (30, 50, 3))
    self.assertEqual(images.labels[0][0], 0)
    # one class, gray, stream
    a = cv2.imread('files/1.jpg', cv2.IMREAD_GRAYSCALE)
    c = np.asarray([a, a, a, a])
    images = Images('foo', c, 0, config=['resize=(30,50)', 'stream', 'gray'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 4)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(images.classes, {'0': 0})
    # one class, gray, stream, load
    # ('gray' drops the channel axis: per-image shape is (30, 50))
    images = Images()
    images.load('foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 4)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'0': 0})
    self.assertEqual(images.images[0][0].shape, (30, 50))
    self.assertEqual(images.labels[0][0], 0)
    # multi class, stream
    c = np.asarray([a, a, a, a, a])
    images = Images('foo', c, [0, 1, 1, 0, 1], config=['resize=(30,50)', 'stream'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 5)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    # multi class, stream, load
    # (labels [0,1,1,0,1] split into classes of size 2 and 3)
    images = Images()
    images.load('foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 5)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    self.assertEqual(len(images.images[0]), 2)
    self.assertEqual(len(images.images[1]), 3)
    self.assertEqual(len(images.labels[0]), 2)
    self.assertEqual(len(images.labels[1]), 3)
    self.assertEqual(images.images[0][0].shape, (30, 50, 3))
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.labels[1][0], 1)
    os.remove('foo.h5')
def test_029(self):
    """ Images - memory - labels as strings """
    a = cv2.imread('files/1.jpg', cv2.IMREAD_COLOR)

    def check(imgs, count, with_images=True):
        # shared assertions for the single string-labeled class 'cat'
        self.assertEqual(imgs.fail, 0)
        self.assertEqual(imgs.errors, [])
        self.assertEqual(imgs.count, count)
        self.assertEqual(imgs.shape, (30, 50))
        self.assertEqual(imgs.classes, {'cat': 0})
        if with_images:
            self.assertEqual(len(imgs.images), 1)
        self.assertEqual(len(imgs.labels), 1)
        self.assertEqual(imgs.labels[0][0], 0)

    # single class, label is string
    images = Images('foo', np.asarray([a]), 'cat', config=['resize=(30,50)'])
    check(images, 1)
    # single class, label is string, store
    images = Images('foo', np.asarray([a, a, a, a]), 'cat',
                    config=['resize=(30,50)', 'store'])
    check(images, 4)
    # single class, label is string, load
    images = Images()
    images.load('foo')
    check(images, 4)
    # single class, label is string, stream
    images = Images('foo', np.asarray([a, a, a, a]), 'cat',
                    config=['resize=(30,50)', 'stream'])
    check(images, 4, with_images=False)
    # single class, label is string, load, stream
    images = Images()
    images.load('foo')
    check(images, 4, with_images=False)
    os.remove('foo.h5')
def test_030(self):
    """ Images - memory - labels as [strings] """

    def verify_single(images, with_images=True):
        # Shared assertions for the single-class ('cat') scenarios.
        # with_images=False for stream-mode results, where the original
        # test did not inspect images.images.
        self.assertEqual(images.fail, 0)
        self.assertEqual(images.errors, [])
        self.assertEqual(images.count, 1)
        self.assertEqual(images.shape, (30, 50))
        self.assertEqual(images.classes, {'cat': 0})
        if with_images:
            self.assertEqual(len(images.images), 1)
        self.assertEqual(len(images.labels), 1)
        self.assertEqual(images.labels[0][0], 0)

    def verify_multi(images, with_images=True):
        # Shared assertions for the two-class scenarios (3 cats, 2 dogs).
        # Class-index assignment is not deterministic, so locate the 'cat'
        # group through images.classes instead of assuming an order.
        self.assertEqual(images.fail, 0)
        self.assertEqual(images.errors, [])
        self.assertEqual(images.count, 5)
        self.assertEqual(images.shape, (30, 50))
        cat = images.classes['cat']
        self.assertEqual(images.classes, {'cat': cat, 'dog': 1 - cat})
        # groups holds the per-class collections whose sizes we check:
        # batched images when available, otherwise the label groups (stream).
        groups = images.images if with_images else images.labels
        if with_images:
            self.assertEqual(len(images.images), 2)
            self.assertEqual(len(images.labels), 2)
        cat_group = 0 if images.labels[0][0] == cat else 1
        self.assertEqual(len(groups[cat_group]), 3)      # the 3 'cat' samples
        self.assertEqual(len(groups[1 - cat_group]), 2)  # the 2 'dog' samples

    # single class, label is [string]
    a = cv2.imread('files/1.jpg', cv2.IMREAD_COLOR)
    c = np.asarray([a])
    verify_single(Images('foo', c, ['cat'], config=['resize=(30,50)']))
    # single class, label is [string], store
    c = np.asarray([a])
    verify_single(Images('foo', c, ['cat'], config=['resize=(30,50)', 'store']))
    # single class, label is [string], load
    images = Images()
    images.load('foo')
    verify_single(images)
    # single class, label is [string], stream
    c = np.asarray([a])
    verify_single(Images('foo', c, ['cat'], config=['resize=(30,50)', 'stream']),
                  with_images=False)
    # single class, label is [string], load, stream
    images = Images()
    images.load('foo')
    verify_single(images, with_images=False)
    # multi class, label is [string]
    a = cv2.imread('files/1.jpg', cv2.IMREAD_COLOR)
    c = np.asarray([a, a, a, a, a])
    labels = ['cat', 'dog', 'cat', 'cat', 'dog']
    verify_multi(Images('foo', c, labels, config=['resize=(30,50)']))
    # multi class, label is [string], store
    a = cv2.imread('files/1.jpg', cv2.IMREAD_COLOR)
    c = np.asarray([a, a, a, a, a])
    verify_multi(Images('foo', c, labels, config=['resize=(30,50)', 'store']))
    # multi class, label is [string], store/load
    images = Images()
    images.load('foo')
    verify_multi(images)
    # multi class, label is [string], stream
    a = cv2.imread('files/1.jpg', cv2.IMREAD_COLOR)
    c = np.asarray([a, a, a, a, a])
    verify_multi(Images('foo', c, labels, config=['resize=(30,50)', 'stream']),
                 with_images=False)
    # multi class, label is [string], stream/load
    images = Images()
    images.load('foo')
    verify_multi(images)
    # BUG FIX: the original asserted len(images.labels[0]) == 3 unconditionally,
    # but which group holds 3 items depends on the nondeterministic class-index
    # assignment handled above.  Check the group sizes order-independently.
    self.assertEqual(sorted(len(lbl) for lbl in images.labels), [2, 3])
    os.remove('foo.h5')
def test_031(self):
    """ Images - list - no images """
    # An empty image list should produce an empty, error-free collection
    # with the default 128x128 shape and no classes recorded.
    images = Images('foo', [], 1)
    self.assertEqual((images.fail, images.errors, images.count), (0, [], 0))
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual((len(images.images), len(images.labels)), (0, 0))
    self.assertEqual(images.time, 0)
    self.assertEqual(images.classes, [])
def test_032(self):
    """ Images - list - nonexist-image """

    def verify(images, n_fail, n_good):
        # Common checks: n_fail images failed to load, n_good succeeded.
        self.assertEqual(images.name, 'foo')
        self.assertEqual(images.fail, n_fail)
        self.assertEqual(len(images.errors), n_fail)
        self.assertEqual(images.count, n_good)
        self.assertEqual(images.shape, (128, 128))
        n_groups = 1 if n_good else 0
        self.assertEqual(len(images.images), n_groups)
        self.assertEqual(len(images.labels), n_groups)
        self.assertEqual(images.classes, {'1': 0})

    # single non-exist local image
    verify(Images('foo', ['noimage.jpg'], 1), 1, 0)
    # one good, one bad image
    images = Images('foo', ['files/1.jpg', 'noimage.jpg'], 1)
    verify(images, 1, 1)
    self.assertEqual(images.labels[0][0], 0)
    # single non-exist remote image
    verify(Images('foo', ['http://foobar17.com/noimage.jpg'], 1), 1, 0)
def test_033(self):
    """ Images - list local files """

    def verify_single(images, shape, with_images=True):
        # Shared assertions for the single-class scenarios (2 samples).
        # with_images=False for stream mode, where images.images is not
        # inspected.
        self.assertEqual(images.name, 'foo')
        self.assertEqual(images.fail, 0)
        self.assertEqual(len(images.errors), 0)
        self.assertEqual(images.count, 2)
        self.assertEqual(images.shape, shape)
        self.assertEqual(images.classes, {'1': 0})
        if with_images:
            self.assertEqual(len(images.images), 1)
            self.assertEqual(len(images.images[0]), 2)
        self.assertEqual(len(images.labels), 1)
        self.assertEqual(len(images.labels[0]), 2)
        self.assertEqual(images.labels[0][0], 0)

    def verify_multi(images, shape, shapes=None):
        # Shared assertions for the two-class scenarios (1 + 2 samples).
        # shapes, when given, is the expected (class0, class1) batch shapes.
        self.assertEqual(images.name, 'foo')
        self.assertEqual(images.fail, 0)
        self.assertEqual(len(images.errors), 0)
        self.assertEqual(images.count, 3)
        self.assertEqual(images.shape, shape)
        self.assertEqual(images.classes, {'0': 0, '1': 1})
        if shapes is not None:
            self.assertEqual(len(images.images), 2)
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.images[1]), 2)
            self.assertEqual(images.images[0].shape, shapes[0])
            self.assertEqual(images.images[1].shape, shapes[1])
        self.assertEqual(len(images.labels), 2)
        self.assertEqual(len(images.labels[0]), 1)
        self.assertEqual(len(images.labels[1]), 2)
        self.assertEqual(images.labels[0][0], 0)
        self.assertEqual(images.labels[1][0], 1)

    pair = ['files/1.jpg', 'files/2.jpg']
    # single class
    images = Images('foo', pair, 1)
    verify_single(images, (128, 128))
    self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
    # single class, store
    images = Images('foo', pair, 1, config=['store', 'resize=40,50'])
    verify_single(images, (40, 50))
    self.assertEqual(images.images[0].shape, (2, 40, 50, 3))
    # single class, store, load
    images = Images()
    images.load('foo')
    verify_single(images, (40, 50))
    self.assertEqual(images.images[0].shape, (2, 40, 50, 3))
    # single class, stream
    images = Images('foo', pair, 1, config=['stream', 'resize=40,50', 'gray'])
    verify_single(images, (40, 50), with_images=False)
    # single class, stream, load
    images = Images()
    images.load('foo')
    verify_single(images, (40, 50))
    self.assertEqual(images.images[0].shape, (2, 40, 50))
    trio = ['files/1.jpg', 'files/2.jpg', 'files/3.jpg']
    # multi-class
    images = Images('foo', trio, [0, 1, 1])
    verify_multi(images, (128, 128), ((1, 128, 128, 3), (2, 128, 128, 3)))
    # multi-class, store
    images = Images('foo', trio, [0, 1, 1], config=['store'])
    verify_multi(images, (128, 128), ((1, 128, 128, 3), (2, 128, 128, 3)))
    # multi-class, store, load
    images = Images()
    images.load('foo')
    verify_multi(images, (128, 128), ((1, 128, 128, 3), (2, 128, 128, 3)))
    # multi-class, stream
    images = Images('foo', trio, [0, 1, 1], config=['stream', 'flat', 'resize=50,50'])
    verify_multi(images, (50, 50))
    # multi-class, stream, load
    images = Images()
    images.load('foo')
    verify_multi(images, (50, 50), ((1, 7500), (2, 7500)))
def test_034(self):
    """ Images - remote files """
    IMAGE1 = 'https://assets.pernod-ricard.com/uk/media_images/test.jpg'
    IMAGE2 = 'https://www.accesshq.com/workspace/images/articles/test-your-technology.jpg'

    def verify_single(images, shape, with_images=True):
        # Shared assertions for the single-class scenarios (2 samples).
        # with_images=False for stream mode, where images.images is not
        # inspected.
        self.assertEqual(images.name, 'foo')
        self.assertEqual(images.fail, 0)
        self.assertEqual(len(images.errors), 0)
        self.assertEqual(images.count, 2)
        self.assertEqual(images.shape, shape)
        self.assertEqual(images.classes, {'1': 0})
        if with_images:
            self.assertEqual(len(images.images), 1)
            self.assertEqual(len(images.images[0]), 2)
        self.assertEqual(len(images.labels), 1)
        self.assertEqual(len(images.labels[0]), 2)
        self.assertEqual(images.labels[0][0], 0)

    def verify_multi(images, shape, shapes=None):
        # Shared assertions for the two-class scenarios (1 + 2 samples).
        # shapes, when given, is the expected (class0, class1) batch shapes.
        self.assertEqual(images.name, 'foo')
        self.assertEqual(images.fail, 0)
        self.assertEqual(len(images.errors), 0)
        self.assertEqual(images.count, 3)
        self.assertEqual(images.shape, shape)
        self.assertEqual(images.classes, {'0': 0, '1': 1})
        if shapes is not None:
            self.assertEqual(len(images.images), 2)
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.images[1]), 2)
            self.assertEqual(images.images[0].shape, shapes[0])
            self.assertEqual(images.images[1].shape, shapes[1])
        self.assertEqual(len(images.labels), 2)
        self.assertEqual(len(images.labels[0]), 1)
        self.assertEqual(len(images.labels[1]), 2)
        self.assertEqual(images.labels[0][0], 0)
        self.assertEqual(images.labels[1][0], 1)

    # single class
    images = Images('foo', [IMAGE1, IMAGE2], 1)
    verify_single(images, (128, 128))
    self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
    # single class, store
    images = Images('foo', [IMAGE1, IMAGE2], 1, config=['store', 'resize=40,50'])
    verify_single(images, (40, 50))
    self.assertEqual(images.images[0].shape, (2, 40, 50, 3))
    # single class, store, load
    images = Images()
    images.load('foo')
    verify_single(images, (40, 50))
    self.assertEqual(images.images[0].shape, (2, 40, 50, 3))
    # single class, stream
    images = Images('foo', [IMAGE1, IMAGE2], 1, config=['stream', 'resize=40,50', 'gray'])
    verify_single(images, (40, 50), with_images=False)
    # single class, stream, load
    images = Images()
    images.load('foo')
    verify_single(images, (40, 50))
    self.assertEqual(images.images[0].shape, (2, 40, 50))
    # multi-class
    images = Images('foo', [IMAGE1, IMAGE2, IMAGE1], [0, 1, 1])
    verify_multi(images, (128, 128), ((1, 128, 128, 3), (2, 128, 128, 3)))
    # multi-class, store
    images = Images('foo', [IMAGE1, IMAGE2, IMAGE1], [0, 1, 1], config=['store'])
    verify_multi(images, (128, 128), ((1, 128, 128, 3), (2, 128, 128, 3)))
    # multi-class, store, load
    images = Images()
    images.load('foo')
    verify_multi(images, (128, 128), ((1, 128, 128, 3), (2, 128, 128, 3)))
    # multi-class, stream
    images = Images('foo', [IMAGE1, IMAGE2, IMAGE1], [0, 1, 1],
                    config=['stream', 'flat', 'resize=50,50'])
    verify_multi(images, (50, 50))
    # multi-class, stream, load
    images = Images()
    images.load('foo')
    verify_multi(images, (50, 50), ((1, 7500), (2, 7500)))
def test_035(self):
    """ Images - list - memory """
    a = cv2.imread('files/1.jpg', cv2.IMREAD_COLOR)

    def verify_single(images, shape, with_images=True):
        # Shared assertions for the single-class scenarios (2 samples).
        # with_images=False for stream mode, where images.images is not
        # inspected.
        self.assertEqual(images.name, 'foo')
        self.assertEqual(images.fail, 0)
        self.assertEqual(len(images.errors), 0)
        self.assertEqual(images.count, 2)
        self.assertEqual(images.shape, shape)
        self.assertEqual(images.classes, {'1': 0})
        if with_images:
            self.assertEqual(len(images.images), 1)
            self.assertEqual(len(images.images[0]), 2)
        self.assertEqual(len(images.labels), 1)
        self.assertEqual(len(images.labels[0]), 2)
        self.assertEqual(images.labels[0][0], 0)

    def verify_multi(images, shape, shapes=None):
        # Shared assertions for the two-class scenarios (1 + 2 samples).
        # shapes, when given, is the expected (class0, class1) batch shapes.
        self.assertEqual(images.name, 'foo')
        self.assertEqual(images.fail, 0)
        self.assertEqual(len(images.errors), 0)
        self.assertEqual(images.count, 3)
        self.assertEqual(images.shape, shape)
        self.assertEqual(images.classes, {'0': 0, '1': 1})
        if shapes is not None:
            self.assertEqual(len(images.images), 2)
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.images[1]), 2)
            self.assertEqual(images.images[0].shape, shapes[0])
            self.assertEqual(images.images[1].shape, shapes[1])
        self.assertEqual(len(images.labels), 2)
        self.assertEqual(len(images.labels[0]), 1)
        self.assertEqual(len(images.labels[1]), 2)
        self.assertEqual(images.labels[0][0], 0)
        self.assertEqual(images.labels[1][0], 1)

    # single class
    images = Images('foo', [a, a], 1)
    verify_single(images, (128, 128))
    self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
    # single class, store
    images = Images('foo', [a, a], 1, config=['store', 'resize=40,50'])
    verify_single(images, (40, 50))
    self.assertEqual(images.images[0].shape, (2, 40, 50, 3))
    # single class, store, load
    images = Images()
    images.load('foo')
    verify_single(images, (40, 50))
    self.assertEqual(images.images[0].shape, (2, 40, 50, 3))
    # single class, stream
    images = Images('foo', [a, a], 1, config=['stream', 'resize=40,50', 'gray'])
    verify_single(images, (40, 50), with_images=False)
    # single class, stream, load
    images = Images()
    images.load('foo')
    verify_single(images, (40, 50))
    self.assertEqual(images.images[0].shape, (2, 40, 50))
    # multi-class
    images = Images('foo', [a, a, a], [0, 1, 1])
    verify_multi(images, (128, 128), ((1, 128, 128, 3), (2, 128, 128, 3)))
    # multi-class, store
    images = Images('foo', [a, a, a], [0, 1, 1], config=['store'])
    verify_multi(images, (128, 128), ((1, 128, 128, 3), (2, 128, 128, 3)))
    # multi-class, store, load
    images = Images()
    images.load('foo')
    verify_multi(images, (128, 128), ((1, 128, 128, 3), (2, 128, 128, 3)))
    # multi-class, stream
    images = Images('foo', [a, a, a], [0, 1, 1], config=['stream', 'flat', 'resize=50,50'])
    verify_multi(images, (50, 50))
    # multi-class, stream, load
    images = Images()
    images.load('foo')
    verify_multi(images, (50, 50), ((1, 7500), (2, 7500)))
def test_036(self):
    """ Images - list - list as [string]

    Builds Images collections from in-memory cv2 arrays with string class
    labels, covering single-class and multi-class inputs across each mode:
    in-memory, store, store/load, stream and stream/load.
    """
    a = cv2.imread('files/1.jpg', cv2.IMREAD_COLOR)
    # single class
    images = Images('foo', [a, a], 'cat')
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 2)
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'cat': 0})
    self.assertEqual(len(images.images[0]), 2)
    self.assertEqual(len(images.labels[0]), 2)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
    # single class, store
    images = Images('foo', [a, a], 'cat', config=['store', 'resize=40,50'])
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 2)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'cat': 0})
    self.assertEqual(len(images.images[0]), 2)
    self.assertEqual(len(images.labels[0]), 2)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.images[0].shape, (2, 40, 50, 3))
    # single class, store, load
    images = Images()
    images.load('foo')
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 2)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'cat': 0})
    self.assertEqual(len(images.images[0]), 2)
    self.assertEqual(len(images.labels[0]), 2)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.images[0].shape, (2, 40, 50, 3))
    # single class, stream (no in-memory image collections, labels only)
    images = Images('foo', [a, a], 'cat', config=['stream', 'resize=40,50', 'gray'])
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 2)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'cat': 0})
    self.assertEqual(len(images.labels[0]), 2)
    self.assertEqual(images.labels[0][0], 0)
    # single class, stream, load
    images = Images()
    images.load('foo')
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 2)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'cat': 0})
    self.assertEqual(len(images.images[0]), 2)
    self.assertEqual(len(images.labels[0]), 2)
    self.assertEqual(images.labels[0][0], 0)
    # 'gray' drops the channel axis, so the batch shape is 3-D here
    self.assertEqual(images.images[0].shape, (2, 40, 50))
    # multi-class
    images = Images('foo', [a, a, a], ['cat', 'dog', 'dog'])
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 3)
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    # class-name -> index assignment is nondeterministic: accept either order
    if images.classes['cat'] == 0:
        self.assertEqual(images.classes, {'cat': 0, 'dog': 1})
        self.assertEqual(len(images.images[0]), 1)
        self.assertEqual(len(images.labels[0]), 1)
        self.assertEqual(len(images.images[1]), 2)
        self.assertEqual(len(images.labels[1]), 2)
        self.assertEqual(images.images[0].shape, (1, 128, 128, 3))
        self.assertEqual(images.images[1].shape, (2, 128, 128, 3))
    else:
        self.assertEqual(images.classes, {'cat': 1, 'dog': 0})
        self.assertEqual(len(images.images[0]), 2)
        self.assertEqual(len(images.labels[0]), 2)
        self.assertEqual(len(images.images[1]), 1)
        self.assertEqual(len(images.labels[1]), 1)
        self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
        self.assertEqual(images.images[1].shape, (1, 128, 128, 3))
    self.assertEqual(images.labels[0][0], 0)
    # multi-class, store
    images = Images('foo', [a, a, a], ['cat', 'dog', 'dog'], config=['store'])
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 3)
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    if images.classes['cat'] == 0:
        self.assertEqual(images.classes, {'cat': 0, 'dog': 1})
        self.assertEqual(len(images.images[0]), 1)
        self.assertEqual(len(images.labels[0]), 1)
        self.assertEqual(len(images.images[1]), 2)
        self.assertEqual(len(images.labels[1]), 2)
        self.assertEqual(images.images[0].shape, (1, 128, 128, 3))
        self.assertEqual(images.images[1].shape, (2, 128, 128, 3))
    else:
        self.assertEqual(images.classes, {'cat': 1, 'dog': 0})
        self.assertEqual(len(images.images[0]), 2)
        self.assertEqual(len(images.labels[0]), 2)
        self.assertEqual(len(images.images[1]), 1)
        self.assertEqual(len(images.labels[1]), 1)
        self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
        self.assertEqual(images.images[1].shape, (1, 128, 128, 3))
    self.assertEqual(images.labels[0][0], 0)
    # multi-class, store, load
    images = Images()
    images.load('foo')
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 3)
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    # after a reload, both the class indexing and the bucket order may vary,
    # so branch on both classes['cat'] and labels[0][0]
    if images.classes['cat'] == 0:
        self.assertEqual(images.classes, {'cat': 0, 'dog': 1})
        if images.labels[0][0] == 0:
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.labels[0]), 1)
            self.assertEqual(len(images.images[1]), 2)
            self.assertEqual(len(images.labels[1]), 2)
            self.assertEqual(images.images[0].shape, (1, 128, 128, 3))
            self.assertEqual(images.images[1].shape, (2, 128, 128, 3))
        else:
            self.assertEqual(len(images.images[0]), 2)
            self.assertEqual(len(images.labels[0]), 2)
            self.assertEqual(len(images.images[1]), 1)
            self.assertEqual(len(images.labels[1]), 1)
            self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
            self.assertEqual(images.images[1].shape, (1, 128, 128, 3))
    else:
        self.assertEqual(images.classes, {'cat': 1, 'dog': 0})
        if images.labels[0][0] == 0:
            self.assertEqual(len(images.images[0]), 2)
            self.assertEqual(len(images.labels[0]), 2)
            self.assertEqual(len(images.images[1]), 1)
            self.assertEqual(len(images.labels[1]), 1)
            self.assertEqual(images.images[0].shape, (2, 128, 128, 3))
            self.assertEqual(images.images[1].shape, (1, 128, 128, 3))
        else:
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.labels[0]), 1)
            self.assertEqual(len(images.images[1]), 2)
            self.assertEqual(len(images.labels[1]), 2)
            self.assertEqual(images.images[0].shape, (1, 128, 128, 3))
            self.assertEqual(images.images[1].shape, (2, 128, 128, 3))
    # multi-class, stream
    images = Images('foo', [a, a, a], ['cat', 'dog', 'dog'],
                    config=['stream', 'flat', 'resize=50,50'])
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 3)
    self.assertEqual(images.shape, (50, 50))
    self.assertEqual(len(images.labels), 2)
    if images.classes['cat'] == 0:
        self.assertEqual(images.classes, {'cat': 0, 'dog': 1})
        self.assertEqual(len(images.labels[0]), 1)
        self.assertEqual(len(images.labels[1]), 2)
    else:
        self.assertEqual(images.classes, {'cat': 1, 'dog': 0})
        self.assertEqual(len(images.labels[0]), 2)
        self.assertEqual(len(images.labels[1]), 1)
    self.assertEqual(images.labels[0][0], 0)
    # multi-class, stream, load
    images = Images()
    images.load('foo')
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 3)
    self.assertEqual(images.shape, (50, 50))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    if images.classes['cat'] == 0:
        self.assertEqual(images.classes, {'cat': 0, 'dog': 1})
        if images.labels[0][0] == 0:
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.labels[0]), 1)
            self.assertEqual(len(images.images[1]), 2)
            self.assertEqual(len(images.labels[1]), 2)
            # 'flat' flattens each 50x50x3 image to a 7500-element vector
            self.assertEqual(images.images[0].shape, (1, 7500))
            self.assertEqual(images.images[1].shape, (2, 7500))
        else:
            self.assertEqual(len(images.images[0]), 2)
            self.assertEqual(len(images.labels[0]), 2)
            self.assertEqual(len(images.images[1]), 1)
            self.assertEqual(len(images.labels[1]), 1)
            self.assertEqual(images.images[0].shape, (2, 7500))
            self.assertEqual(images.images[1].shape, (1, 7500))
    else:
        self.assertEqual(images.classes, {'cat': 1, 'dog': 0})
        if images.labels[0][0] == 0:
            self.assertEqual(len(images.images[0]), 2)
            self.assertEqual(len(images.labels[0]), 2)
            self.assertEqual(len(images.images[1]), 1)
            self.assertEqual(len(images.labels[1]), 1)
            self.assertEqual(images.images[0].shape, (2, 7500))
            self.assertEqual(images.images[1].shape, (1, 7500))
        else:
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.labels[0]), 1)
            self.assertEqual(len(images.images[1]), 2)
            self.assertEqual(len(images.labels[1]), 2)
            self.assertEqual(images.images[0].shape, (1, 7500))
            self.assertEqual(images.images[1].shape, (2, 7500))
    # [string] are numbers, store - numeric strings stay string class names
    images = Images('foo', ['files/1.jpg', 'files/2.jpg'],
                    ['0', '1'], config=['store'])
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 2)
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    if images.classes['0'] == 0:
        self.assertEqual(images.classes, {'0': 0, '1': 1})
    else:
        self.assertEqual(images.classes, {'0': 1, '1': 0})
    # [string] are numbers, load
    images = Images()
    images.load('foo')
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 2)
    self.assertEqual(images.shape, (128, 128))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    if images.classes['0'] == 0:
        self.assertEqual(images.classes, {'0': 0, '1': 1})
    else:
        self.assertEqual(images.classes, {'0': 1, '1': 0})
    os.remove('foo.h5')
def test_037(self):
    """ Images - list - bad files

    One unreadable path in the input list must be counted as a failure,
    recorded in errors, and excluded from the resulting collection, in
    every mode: in-memory, store, store/load, stream and stream/load.
    """
    bad_paths = ['files/1.jpg', 'bad.jpg', 'files/2.jpg']

    def verify(imgs, with_images=True):
        # One unreadable file -> one failure; the two good images land in class 0.
        self.assertEqual(imgs.fail, 1)
        self.assertEqual(len(imgs.errors), 1)
        self.assertEqual(imgs.count, 2)
        if with_images:
            self.assertEqual(len(imgs.images[0]), 2)
        self.assertEqual(len(imgs.labels[0]), 2)
        self.assertEqual(imgs.labels[0][0], 0)

    # First pass: single scalar label. Second pass: explicit per-image labels,
    # where the bad file carried label 1 and leaves that class empty.
    for labels in (0, [0, 1, 0]):
        # in-memory
        verify(Images('foo', bad_paths, labels))
        # store
        verify(Images('foo', bad_paths, labels, config=['store']))
        # store/load round trip
        reloaded = Images()
        reloaded.load('foo')
        verify(reloaded)
        # stream (no in-memory image collections to check)
        verify(Images('foo', bad_paths, labels, config=['stream']), with_images=False)
        # stream/load round trip
        reloaded = Images()
        reloaded.load('foo')
        verify(reloaded)
    os.remove('foo.h5')
def test_038(self):
    """ Images - CSV - local path

    Exercises CSV-driven construction with local file paths: empty CSVs
    (with/without header), a single-class CSV and a multi-class CSV, each
    across in-memory, store, store/load, stream and stream/load modes.

    Fix: the multi-class stream/load section previously skipped the basic
    bookkeeping assertions (fail/errors/count/shape/lengths) that every
    sibling section performs; they are now applied there as well.
    """

    def _assert_single(imgs, shape, with_images=True):
        # Common checks for the single-class CSV (3 rows, label '0').
        self.assertEqual(imgs.name, 'foo')
        self.assertEqual(imgs.fail, 0)
        self.assertEqual(len(imgs.errors), 0)
        self.assertEqual(imgs.count, 3)
        self.assertEqual(imgs.shape, shape)
        if with_images:
            self.assertEqual(len(imgs.images), 1)
        self.assertEqual(len(imgs.labels), 1)
        self.assertEqual(imgs.classes, {'0': 0})
        if with_images:
            self.assertEqual(len(imgs.images[0]), 3)
        self.assertEqual(len(imgs.labels[0]), 3)
        self.assertEqual(imgs.labels[0][0], 0)

    def _assert_multi_basic(imgs, with_images=True):
        # Common bookkeeping for the multi-class CSV (3 rows, labels '0','1','0').
        self.assertEqual(imgs.fail, 0)
        self.assertEqual(len(imgs.errors), 0)
        self.assertEqual(imgs.count, 3)
        self.assertEqual(imgs.shape, (40, 50))
        if with_images:
            self.assertEqual(len(imgs.images), 2)
        self.assertEqual(len(imgs.labels), 2)

    def _assert_multi_split(imgs, with_images=True):
        # Class-name -> index assignment is nondeterministic; derive expected
        # bucket sizes from whichever class got index 0 and which bucket is first.
        if imgs.classes["'0'"] == 0:
            self.assertEqual(imgs.classes, {"'0'": 0, "'1'": 1})
            n0 = 2 if imgs.labels[0][0] == 0 else 1
        else:
            self.assertEqual(imgs.classes, {"'0'": 1, "'1'": 0})
            n0 = 1 if imgs.labels[0][0] == 0 else 2
        n1 = 3 - n0
        if with_images:
            self.assertEqual(len(imgs.images[0]), n0)
            self.assertEqual(len(imgs.images[1]), n1)
        self.assertEqual(len(imgs.labels[0]), n0)
        self.assertEqual(len(imgs.labels[1]), n1)

    # empty, no header
    open('files/empty.csv', 'w').close()
    images = Images('foo', 'files/empty.csv', config=['image_col=0', 'label_col=1'])
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 0)
    # 'header' on a truly empty file must raise: there is no header row to read
    with pytest.raises(EOFError):
        images = Images('foo', 'files/empty.csv',
                        config=['header', 'image_col=0', 'label_col=1'])
    # empty, header
    with open('files/empty.csv', 'w') as f:
        f.write('header\n')
    images = Images('foo', 'files/empty.csv', config=['header', 'image_col=0', 'label_col=1'])
    self.assertEqual(images.name, 'foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 0)
    # single class
    with open('files/test.csv', 'w') as f:
        f.write('0,files/1.jpg\n')
        f.write('0,files/2.jpg\n')
        f.write('0,files/3.jpg\n')
    images = Images('foo', 'files/test.csv', config=['label_col=0', 'image_col=1'])
    _assert_single(images, (128, 128))
    self.assertEqual(images.images[0].shape, (3, 128, 128, 3))
    # single class, store
    images = Images('foo', 'files/test.csv', config=['label_col=0', 'image_col=1', 'store'])
    _assert_single(images, (128, 128))
    self.assertEqual(images.images[0].shape, (3, 128, 128, 3))
    # single class, store, load
    images = Images()
    images.load('foo')
    _assert_single(images, (128, 128))
    self.assertEqual(images.images[0].shape, (3, 128, 128, 3))
    # single class, stream (no in-memory image collections)
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'stream', 'resize=(50,60)'])
    _assert_single(images, (50, 60), with_images=False)
    # single class, stream, load
    images = Images()
    images.load('foo')
    _assert_single(images, (50, 60))
    self.assertEqual(images.images[0].shape, (3, 50, 60, 3))
    # multi class - quoted labels stay part of the class name
    with open('files/test.csv', 'w') as f:
        f.write('\'0\',files/1.jpg\n')
        f.write('\'1\',files/2.jpg\n')
        f.write('\'0\',files/3.jpg\n')
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'resize=(40,50)'])
    _assert_multi_basic(images)
    _assert_multi_split(images)
    # multi class, store
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'resize=(40,50)', 'store'])
    _assert_multi_basic(images)
    _assert_multi_split(images)
    # multi class, store/load
    images = Images()
    images.load('foo')
    _assert_multi_basic(images)
    _assert_multi_split(images)
    self.assertEqual(images.images[0][0].shape, (40, 50, 3))
    # multi class, stream
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'resize=(40,50)', 'stream', 'gray'])
    _assert_multi_basic(images, with_images=False)
    _assert_multi_split(images, with_images=False)
    # multi class, stream/load
    images = Images()
    images.load('foo')
    # FIX: basic bookkeeping checks were missing here in the original
    _assert_multi_basic(images)
    _assert_multi_split(images)
    # 'gray' drops the channel axis
    self.assertEqual(images.images[0][0].shape, (40, 50))
    os.remove('files/empty.csv')
    os.remove('files/test.csv')
def test_039(self):
    """ Images - CSV - bad paths

    A CSV with one unreadable image path must record exactly one failure
    and keep the 4 readable rows, across in-memory, store, store/load,
    stream and stream/load modes - both when the bad row's class survives
    and when it would have been the only member of its class.

    Fix: the stream/load section previously omitted the
    `len(images.errors) == 1` check present in every sibling section.
    """

    def _assert_counts(imgs, with_images=True):
        # One unreadable path -> one failure and 4 usable images in 2 classes.
        self.assertEqual(imgs.fail, 1)
        self.assertEqual(len(imgs.errors), 1)
        self.assertEqual(imgs.count, 4)
        self.assertEqual(imgs.shape, (40, 50))
        if with_images:
            self.assertEqual(len(imgs.images), 2)
        self.assertEqual(len(imgs.labels), 2)

    def _assert_split(imgs, with_images=True):
        # Class-name -> index assignment is nondeterministic; derive expected
        # bucket sizes from whichever class got index 0 and which bucket is first.
        if imgs.classes["'0'"] == 0:
            self.assertEqual(imgs.classes, {"'0'": 0, "'1'": 1})
            n0 = 3 if imgs.labels[0][0] == 0 else 1
        else:
            self.assertEqual(imgs.classes, {"'0'": 1, "'1'": 0})
            n0 = 1 if imgs.labels[0][0] == 0 else 3
        n1 = 4 - n0
        if with_images:
            self.assertEqual(len(imgs.images[0]), n0)
            self.assertEqual(len(imgs.images[1]), n1)
        self.assertEqual(len(imgs.labels[0]), n0)
        self.assertEqual(len(imgs.labels[1]), n1)

    # bad file in class: 4 good rows plus one unreadable path labeled '0'
    with open('files/test.csv', 'w') as f:
        f.write('\'0\',files/1.jpg\n')
        f.write('\'1\',files/2.jpg\n')
        f.write('\'0\',files/3.jpg\n')
        f.write('\'0\',files/bad.jpg\n')
        f.write('\'0\',files/3.jpg\n')
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'resize=(40,50)'])
    _assert_counts(images)
    _assert_split(images)
    self.assertEqual(images.images[0][0].shape, (40, 50, 3))
    # bad file in class, store
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'resize=(40,50)', 'store'])
    _assert_counts(images)
    _assert_split(images)
    self.assertEqual(images.images[0][0].shape, (40, 50, 3))
    # bad file in class, store/load
    images = Images()
    images.load('foo')
    _assert_counts(images)
    _assert_split(images)
    self.assertEqual(images.images[0][0].shape, (40, 50, 3))
    # bad file in class, stream (no in-memory image collections)
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'resize=(40,50)', 'stream'])
    _assert_counts(images, with_images=False)
    _assert_split(images, with_images=False)
    # bad file in class, stream/load
    images = Images()
    images.load('foo')
    # FIX: the errors-length check was missing here in the original
    _assert_counts(images)
    _assert_split(images)
    self.assertEqual(images.images[0][0].shape, (40, 50, 3))
    # bad file whose label ('2') would be the sole member of its class
    with open('files/test.csv', 'w') as f:
        f.write('\'0\',files/1.jpg\n')
        f.write('\'1\',files/2.jpg\n')
        f.write('\'0\',files/3.jpg\n')
        f.write('\'2\',files/bad.jpg\n')
        f.write('\'0\',files/3.jpg\n')
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'resize=(40,50)'])
    _assert_counts(images)
    # bad file - empty class - store
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'resize=(40,50)', 'store'])
    _assert_counts(images)
    # bad file - empty class - store/load
    images = Images()
    images.load('foo')
    _assert_counts(images)
    # bad file - empty class - stream
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'resize=(40,50)', 'stream'])
    _assert_counts(images, with_images=False)
    # bad file - empty class - stream/load
    images = Images()
    images.load('foo')
    _assert_counts(images)
    os.remove('files/test.csv')
    os.remove('foo.h5')
def test_040(self):
    """ Images - CSV - remote path """
    # Remote image URLs used to build the CSV datasets.
    IMAGE1 = 'https://assets.pernod-ricard.com/uk/media_images/test.jpg'
    IMAGE2 = 'https://www.accesshq.com/workspace/images/articles/test-your-technology.jpg'

    def _check_split(imgs, with_images=True):
        # The integer label assigned to each class and the order of the
        # per-class collections are nondeterministic, so locate the
        # two-image class dynamically before asserting collection sizes.
        # with_images=False skips the in-memory image checks (stream mode).
        if imgs.classes["'0'"] == 0:
            self.assertEqual(imgs.classes, {"'0'": 0, "'1'": 1})
            big = 0 if imgs.labels[0][0] == 0 else 1
        else:
            self.assertEqual(imgs.classes, {"'1'": 0, "'0'": 1})
            big = 1 if imgs.labels[0][0] == 0 else 0
        small = 1 - big
        if with_images:
            self.assertEqual(len(imgs.images[big]), 2)
        self.assertEqual(len(imgs.labels[big]), 2)
        if with_images:
            self.assertEqual(len(imgs.images[small]), 1)
        self.assertEqual(len(imgs.labels[small]), 1)

    # multi class
    with open('files/test.csv', 'w') as f:
        f.write('\'0\',' + IMAGE1 + '\n')
        f.write('\'1\',' + IMAGE2 + '\n')
        f.write('\'0\',' + IMAGE1 + '\n')
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'resize=(50,50)', 'flatten'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 3)
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    _check_split(images)
    self.assertEqual(images.images[0][0].shape, (7500,))
    # multi class - store
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'resize=(50,50)', 'flatten', 'store'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 3)
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    _check_split(images)
    self.assertEqual(images.images[0][0].shape, (7500,))
    # multi class - store/load
    images = Images()
    images.load('foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 3)
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    _check_split(images)
    # multi class - stream
    # BUG FIX: this section previously passed 'store' instead of 'stream',
    # so streaming of remote CSV data was never actually exercised.  Only
    # the labels are checked here, matching the other stream sections of
    # this file, which do not access the images in memory.
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'resize=(50,50)', 'flatten', 'stream'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 3)
    self.assertEqual(len(images.labels), 2)
    _check_split(images, with_images=False)
    # multi class - stream/load
    images = Images()
    images.load('foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.count, 3)
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    _check_split(images)
    # bad remote file
    with open('files/test.csv', 'w') as f:
        f.write('\'0\',' + IMAGE1 + '\n')
        f.write('\'1\',' + IMAGE2 + '\n')
        f.write('\'0\',' + IMAGE1 + '\n')
        f.write('\'0\',http://foobar17.com/noimage.jpg\n')
    images = Images('foo', 'files/test.csv',
                    config=['label_col=0', 'image_col=1', 'resize=(50,50)', 'flatten'])
    self.assertEqual(images.fail, 1)
    self.assertEqual(len(images.errors), 1)
    self.assertEqual(images.count, 3)
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    _check_split(images)
    self.assertEqual(images.images[0][0].shape, (7500,))
    os.remove('foo.h5')
    os.remove('files/test.csv')
def test_041(self):
    """ Images - CSV - memory """

    def verify(imgs, with_images=True):
        # Assert the common invariants of the 5-image daisy/cat dataset.
        # The label number assigned to each class is nondeterministic,
        # so branch on the observed mapping.  with_images=False skips
        # the in-memory image checks (stream mode).
        self.assertEqual(imgs.count, 5)
        self.assertEqual(imgs.fail, 0)
        self.assertEqual(len(imgs.errors), 0)
        self.assertEqual(imgs.shape, (40, 50))
        if with_images:
            self.assertEqual(len(imgs.images), 2)
        self.assertEqual(len(imgs.labels), 2)
        if imgs.classes['daisy'] == 0:
            self.assertEqual(imgs.classes, {'daisy': 0, 'cat': 1})
            counts = (3, 2) if imgs.labels[0][0] == 0 else (2, 3)
            for index, expected in enumerate(counts):
                self.assertEqual(len(imgs.labels[index]), expected)
                if with_images:
                    self.assertEqual(len(imgs.images[index]), expected)
        else:
            self.assertEqual(imgs.classes, {'daisy': 1, 'cat': 0})

    # multi-class - flatten/gray
    imgs = Images('foo', 'files/array.csv',
                  config=['resize=(40,50)', 'gray', 'flat', 'label_col=0', 'image_col=1'])
    verify(imgs)
    self.assertEqual(imgs.images[0][0].shape, (2000,))
    # multi-class - gray
    imgs = Images('foo', 'files/array.csv',
                  config=['resize=(40,50)', 'gray', 'label_col=0', 'image_col=1'])
    verify(imgs)
    self.assertEqual(imgs.images[0][0].shape, (40, 50))
    # multi-class - color
    imgs = Images('foo', 'files/array.csv',
                  config=['resize=(40,50)', 'label_col=0', 'image_col=1'])
    verify(imgs)
    self.assertEqual(imgs.images[0][0].shape, (40, 50, 3))
    # multi-class - store
    imgs = Images('foo', 'files/array.csv',
                  config=['resize=(40,50)', 'label_col=0', 'image_col=1', 'store'])
    verify(imgs)
    self.assertEqual(imgs.images[0][0].shape, (40, 50, 3))
    # multi-class - store/load
    imgs = Images()
    imgs.load('foo')
    verify(imgs)
    self.assertEqual(imgs.images[0][0].shape, (40, 50, 3))
    # multi-class - stream
    imgs = Images('foo', 'files/array.csv',
                  config=['resize=(40,50)', 'label_col=0', 'image_col=1', 'stream'])
    verify(imgs, with_images=False)
    # multi-class - stream/load
    imgs = Images()
    imgs.load('foo')
    verify(imgs)
    self.assertEqual(imgs.images[0][0].shape, (40, 50, 3))
    os.remove('foo.h5')
def test_042(self):
    """ Images - CSV - wrong line size """

    def _expect(rows, count):
        # Write the CSV rows, process the dataset, and assert that
        # exactly one row failed with the given number of good images.
        with open('files/test.csv', 'w') as f:
            f.writelines(rows)
        images = Images('foo', 'files/test.csv',
                        config=['resize=(8,8)', 'label_col=0', 'image_col=1'])
        self.assertEqual(images.count, count)
        self.assertEqual(images.fail, 1)
        self.assertEqual(len(images.errors), 1)

    # bad file - incomplete embedded data
    _expect(['0,"[1,2,3,4"\n'], 0)
    # bad file - missing image column
    _expect(['0,files/1.jpg\n', '0\n', '0,files/2.jpg\n'], 2)
    # bad file - blank file
    _expect(['0,files/1.jpg\n', '0,\n', '0,files/2.jpg\n'], 2)
    # bad file - wrong file type
    _expect(['0,files/1.jpg\n', '0,func_test.py\n', '0,files/2.jpg\n'], 2)
    os.remove('files/test.csv')
def test_043(self):
    """ Images - JSON - local path """

    def _check_single(imgs, with_images=True):
        # Common assertions for the single-class (label 0) dataset.
        # with_images=False skips in-memory image checks (stream mode).
        self.assertEqual(imgs.count, 3)
        self.assertEqual(imgs.fail, 0)
        self.assertEqual(len(imgs.errors), 0)
        self.assertEqual(imgs.shape, (40, 50))
        if with_images:
            self.assertEqual(len(imgs.images), 1)
        self.assertEqual(len(imgs.labels), 1)
        self.assertEqual(imgs.classes, {0: 0})
        self.assertEqual(len(imgs.labels[0]), 3)
        if with_images:
            self.assertEqual(len(imgs.images[0]), 3)

    def _check_multi(imgs, with_images=True):
        # Common assertions for the cat/dog dataset.  The label number
        # assigned to each class is nondeterministic, so locate the
        # two-image cat class dynamically.
        self.assertEqual(imgs.count, 3)
        self.assertEqual(imgs.fail, 0)
        self.assertEqual(len(imgs.errors), 0)
        self.assertEqual(imgs.shape, (40, 50))
        if with_images:
            self.assertEqual(len(imgs.images), 2)
        self.assertEqual(len(imgs.labels), 2)
        if imgs.classes['cat'] == 0:
            self.assertEqual(imgs.classes, {'cat': 0, 'dog': 1})
            cat = 0 if imgs.labels[0][0] == 0 else 1
        else:
            # Consistency fix: the original omitted this mapping check
            # here, unlike the parallel bad-local-path test (test_044).
            self.assertEqual(imgs.classes, {'cat': 1, 'dog': 0})
            cat = 0 if imgs.labels[0][0] == 1 else 1
        dog = 1 - cat
        if with_images:
            self.assertEqual(len(imgs.images[cat]), 2)
        self.assertEqual(len(imgs.labels[cat]), 2)
        if with_images:
            self.assertEqual(len(imgs.images[dog]), 1)
        self.assertEqual(len(imgs.labels[dog]), 1)

    # single class
    with open('files/test.json', 'w') as f:
        f.write('[')
        f.write('{"label": 0, "image": "files/1.jpg"},\n')
        f.write('{"label": 0, "image": "files/2.jpg"},\n')
        f.write('{"label": 0, "image": "files/3.jpg"}\n')
        f.write(']')
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)', 'label_key=label', 'image_key=image'])
    _check_single(images)
    self.assertEqual(images.images[0][0].shape, (40, 50, 3))
    # single class - store
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)', 'label_key=label', 'image_key=image', 'store'])
    _check_single(images)
    self.assertEqual(images.images[0][0].shape, (40, 50, 3))
    # single class - store/load
    images = Images()
    images.load('foo')
    _check_single(images)
    self.assertEqual(images.images[0][0].shape, (40, 50, 3))
    # single class - stream
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)', 'label_key=label', 'image_key=image', 'stream'])
    _check_single(images, with_images=False)
    # single class - stream/load
    images = Images()
    images.load('foo')
    _check_single(images)
    self.assertEqual(images.images[0][0].shape, (40, 50, 3))
    # multi class
    with open('files/test.json', 'w') as f:
        f.write('[')
        f.write('{"label": "cat", "image": "files/1.jpg"},\n')
        f.write('{"label": "dog", "image": "files/2.jpg"},\n')
        f.write('{"label": "cat", "image": "files/3.jpg"}\n')
        f.write(']')
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)', 'label_key=label', 'image_key=image', 'gray'])
    _check_multi(images)
    self.assertEqual(images.images[0][0].shape, (40, 50))
    # multi class - store
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)', 'label_key=label', 'image_key=image',
                            'gray', 'flat', 'store'])
    _check_multi(images)
    self.assertEqual(images.images[0][0].shape, (2000,))
    # multi class - store/load
    images = Images()
    images.load('foo')
    _check_multi(images)
    self.assertEqual(images.images[0][0].shape, (2000,))
    # multi class - stream
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)', 'label_key=label', 'image_key=image',
                            'gray', 'flat', 'stream'])
    _check_multi(images, with_images=False)
    # multi class - stream/load
    images = Images()
    images.load('foo')
    _check_multi(images)
    self.assertEqual(images.images[0][0].shape, (2000,))
    os.remove('files/test.json')
    os.remove('foo.h5')
def test_044(self):
    """ Images - JSON - bad local path """

    def confirm(imgs, with_images=True):
        # One of the four records points at a missing file, so exactly
        # one failure is expected.  The label number assigned to each
        # class is nondeterministic, so both mappings are accepted.
        # with_images=False skips in-memory image checks (stream mode).
        self.assertEqual(imgs.count, 3)
        self.assertEqual(imgs.fail, 1)
        self.assertEqual(len(imgs.errors), 1)
        self.assertEqual(imgs.shape, (40, 50))
        if with_images:
            self.assertEqual(len(imgs.images), 2)
        self.assertEqual(len(imgs.labels), 2)
        if imgs.classes['cat'] == 0:
            self.assertEqual(imgs.classes, {'cat': 0, 'dog': 1})
            expected = 2 if imgs.labels[0][0] == 0 else 1
        else:
            self.assertEqual(imgs.classes, {'cat': 1, 'dog': 0})
            expected = 2 if imgs.labels[0][0] == 1 else 1
        self.assertEqual(len(imgs.labels[0]), expected)
        if with_images:
            self.assertEqual(len(imgs.images[0]), expected)

    # multi-class dataset with one unreadable entry
    records = [
        '{"label": "cat", "image": "files/1.jpg"},\n',
        '{"label": "dog", "image": "files/2.jpg"},\n',
        '{"label": "cat", "image": "files/nofile.jpg"},\n',
        '{"label": "cat", "image": "files/3.jpg"}\n',
    ]
    with open('files/test.json', 'w') as handle:
        handle.write('[')
        handle.writelines(records)
        handle.write(']')
    # multi-class
    imgs = Images('foo', 'files/test.json',
                  config=['resize=(40,50)', 'label_key=label', 'image_key=image'])
    confirm(imgs)
    # multi-class - store
    imgs = Images('foo', 'files/test.json',
                  config=['resize=(40,50)', 'label_key=label', 'image_key=image', 'store'])
    confirm(imgs)
    # multi-class - store/load
    imgs = Images()
    imgs.load('foo')
    confirm(imgs)
    # multi-class - stream
    imgs = Images('foo', 'files/test.json',
                  config=['resize=(40,50)', 'label_key=label', 'image_key=image', 'stream'])
    confirm(imgs, with_images=False)
    # multi-class - stream/load
    imgs = Images()
    imgs.load('foo')
    confirm(imgs)
    os.remove('files/test.json')
    os.remove('foo.h5')
def test_045(self):
    """ Images - JSON - remote path """
    # Remote JPEGs referenced by URL; each phase re-downloads them.
    # NOTE(review): test depends on these third-party hosts being reachable.
    IMAGE1 = 'https://assets.pernod-ricard.com/uk/media_images/test.jpg'
    IMAGE2 = 'https://www.accesshq.com/workspace/images/articles/test-your-technology.jpg'
    # single file
    f = open('files/test.json', 'w')
    f.write("[")
    f.write('{"label": 0, "image": "' + IMAGE1 + '"},\n')
    f.write('{"label": 0, "image": "' + IMAGE2 + '"},\n')
    f.write('{"label": 0, "image": "' + IMAGE1 + '"}\n')
    f.write("]")
    f.close()
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)', 'label_key=label', 'image_key=image', 'gray'])
    self.assertEqual(images.count, 3)
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {0: 0})
    self.assertEqual(len(images.labels[0]), 3)
    self.assertEqual(len(images.images[0]), 3)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.images[0][0].shape, (40, 50))
    # single file - store
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)',
                            'label_key=label',
                            'image_key=image',
                            'gray',
                            'store'])
    self.assertEqual(images.count, 3)
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {0: 0})
    self.assertEqual(len(images.labels[0]), 3)
    self.assertEqual(len(images.images[0]), 3)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.images[0][0].shape, (40, 50))
    # single file - store/load
    images = Images()
    images.load('foo')
    self.assertEqual(images.count, 3)
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {0: 0})
    self.assertEqual(len(images.labels[0]), 3)
    self.assertEqual(len(images.images[0]), 3)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.images[0][0].shape, (40, 50))
    # single file - stream
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)',
                            'label_key=label',
                            'image_key=image',
                            'gray',
                            'stream'])
    # streamed datasets expose labels/metadata but no in-memory image check here
    self.assertEqual(images.count, 3)
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {0: 0})
    self.assertEqual(len(images.labels[0]), 3)
    self.assertEqual(images.labels[0][0], 0)
    # single file - stream/load
    images = Images()
    images.load('foo')
    self.assertEqual(images.count, 3)
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {0: 0})
    self.assertEqual(len(images.labels[0]), 3)
    self.assertEqual(len(images.images[0]), 3)
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.images[0][0].shape, (40, 50))
    # multi class: one label-0 entry and two label-1 entries
    f = open('files/test.json', 'w')
    f.write("[")
    f.write('{"label": 0, "image": "' + IMAGE1 + '"},\n')
    f.write('{"label": 1, "image": "' + IMAGE2 + '"},\n')
    f.write('{"label": 1, "image": "' + IMAGE1 + '"}\n')
    f.write("]")
    f.close()
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)', 'label_key=label', 'image_key=image', 'gray'])
    self.assertEqual(images.count, 3)
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    # class->index assignment is nondeterministic; accept either mapping,
    # then check which bucket holds the single label-0 image
    if images.classes[0] == 0:
        self.assertEqual(images.classes, {0: 0, 1: 1})
        if images.labels[0][0] == 0:
            self.assertEqual(len(images.labels[0]), 1)
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.labels[1]), 2)
            self.assertEqual(len(images.images[1]), 2)
        else:
            self.assertEqual(len(images.labels[0]), 2)
            self.assertEqual(len(images.images[0]), 2)
            self.assertEqual(len(images.labels[1]), 1)
            self.assertEqual(len(images.images[1]), 1)
    else:
        self.assertEqual(images.classes, {0: 1, 1: 0})
        if images.labels[0][0] == 1:
            self.assertEqual(len(images.labels[0]), 1)
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.labels[1]), 2)
            self.assertEqual(len(images.images[1]), 2)
        else:
            self.assertEqual(len(images.labels[0]), 2)
            self.assertEqual(len(images.images[0]), 2)
            self.assertEqual(len(images.labels[1]), 1)
            self.assertEqual(len(images.images[1]), 1)
    self.assertEqual(images.images[0][0].shape, (40, 50))
    # multi class - store
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)',
                            'label_key=label',
                            'image_key=image',
                            'gray',
                            'store'])
    self.assertEqual(images.count, 3)
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    if images.classes[0] == 0:
        self.assertEqual(images.classes, {0: 0, 1: 1})
        if images.labels[0][0] == 0:
            self.assertEqual(len(images.labels[0]), 1)
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.labels[1]), 2)
            self.assertEqual(len(images.images[1]), 2)
        else:
            self.assertEqual(len(images.labels[0]), 2)
            self.assertEqual(len(images.images[0]), 2)
            self.assertEqual(len(images.labels[1]), 1)
            self.assertEqual(len(images.images[1]), 1)
    else:
        self.assertEqual(images.classes, {0: 1, 1: 0})
        if images.labels[0][0] == 1:
            self.assertEqual(len(images.labels[0]), 1)
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.labels[1]), 2)
            self.assertEqual(len(images.images[1]), 2)
        else:
            self.assertEqual(len(images.labels[0]), 2)
            self.assertEqual(len(images.images[0]), 2)
            self.assertEqual(len(images.labels[1]), 1)
            self.assertEqual(len(images.images[1]), 1)
    self.assertEqual(images.images[0][0].shape, (40, 50))
    # multi class - store/load
    images = Images()
    images.load('foo')
    self.assertEqual(images.count, 3)
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    if images.classes[0] == 0:
        self.assertEqual(images.classes, {0: 0, 1: 1})
        if images.labels[0][0] == 0:
            self.assertEqual(len(images.labels[0]), 1)
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.labels[1]), 2)
            self.assertEqual(len(images.images[1]), 2)
        else:
            self.assertEqual(len(images.labels[0]), 2)
            self.assertEqual(len(images.images[0]), 2)
            self.assertEqual(len(images.labels[1]), 1)
            self.assertEqual(len(images.images[1]), 1)
    else:
        self.assertEqual(images.classes, {0: 1, 1: 0})
        if images.labels[0][0] == 1:
            self.assertEqual(len(images.labels[0]), 1)
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.labels[1]), 2)
            self.assertEqual(len(images.images[1]), 2)
        else:
            self.assertEqual(len(images.labels[0]), 2)
            self.assertEqual(len(images.images[0]), 2)
            self.assertEqual(len(images.labels[1]), 1)
            self.assertEqual(len(images.images[1]), 1)
    self.assertEqual(images.images[0][0].shape, (40, 50))
    # multi class - stream
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)',
                            'label_key=label',
                            'image_key=image',
                            'gray',
                            'stream'])
    self.assertEqual(images.count, 3)
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.labels), 2)
    if images.classes[0] == 0:
        self.assertEqual(images.classes, {0: 0, 1: 1})
        if images.labels[0][0] == 0:
            self.assertEqual(len(images.labels[0]), 1)
            self.assertEqual(len(images.labels[1]), 2)
        else:
            self.assertEqual(len(images.labels[0]), 2)
            self.assertEqual(len(images.labels[1]), 1)
    else:
        # NOTE(review): this branch only verifies the class mapping; the
        # label-count checks of the other branch are not repeated here
        self.assertEqual(images.classes, {0: 1, 1: 0})
    # multi class - stream/load
    images = Images()
    images.load('foo')
    self.assertEqual(images.count, 3)
    self.assertEqual(images.fail, 0)
    self.assertEqual(len(images.errors), 0)
    self.assertEqual(images.shape, (40, 50))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    if images.classes[0] == 0:
        self.assertEqual(images.classes, {0: 0, 1: 1})
        if images.labels[0][0] == 0:
            self.assertEqual(len(images.labels[0]), 1)
            self.assertEqual(len(images.images[0]), 1)
            self.assertEqual(len(images.labels[1]), 2)
            self.assertEqual(len(images.images[1]), 2)
        else:
            self.assertEqual(len(images.labels[0]), 2)
            self.assertEqual(len(images.images[0]), 2)
            self.assertEqual(len(images.labels[1]), 1)
            self.assertEqual(len(images.images[1]), 1)
    else:
        # NOTE(review): as above, only the class mapping is verified here
        self.assertEqual(images.classes, {0: 1, 1: 0})
    self.assertEqual(images.images[0][0].shape, (40, 50))
    os.remove('files/test.json')
    os.remove('foo.h5')
def test_046(self):
    """ Images - JSON - bad remote files """
    IMAGE1 = 'https://assets.pernod-ricard.com/uk/media_images/test.jpg'
    IMAGE2 = 'https://www.accesshq.com/workspace/images/articles/test-your-technology.jpg'
    # first entry is a dead URL, so its class ends up with no usable image
    f = open('files/test.json', 'w')
    f.write("[")
    f.write('{"label": 0, "image": "http://badfile.ppt"},\n')
    f.write('{"label": 1, "image": "' + IMAGE2 + '"},\n')
    f.write('{"label": 1, "image": "' + IMAGE1 + '"}\n')
    f.write("]")
    f.close()

    def verify(imgs, streamed=False):
        # shared assertions: 2 good images, 1 failure, single populated class
        self.assertEqual(imgs.count, 2)
        self.assertEqual(imgs.fail, 1)
        self.assertEqual(len(imgs.errors), 1)
        self.assertEqual(imgs.shape, (40, 50))
        if not streamed:
            self.assertEqual(len(imgs.images), 1)
        self.assertEqual(len(imgs.labels), 1)
        # class->index assignment is nondeterministic; accept either mapping
        if imgs.classes[0] == 0:
            self.assertEqual(imgs.classes, {0:0, 1:1})
        else:
            self.assertEqual(imgs.classes, {0:1, 1:0})
        self.assertEqual(len(imgs.labels[0]), 2)
        if not streamed:
            self.assertEqual(len(imgs.images[0]), 2)
            self.assertEqual(imgs.images[0][0].shape, (40, 50))

    # empty class
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)', 'label_key=label', 'image_key=image', 'gray'])
    verify(images)
    # empty class - store
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)', 'label_key=label', 'image_key=image',
                            'gray', 'store'])
    verify(images)
    # empty class - store/load
    images = Images()
    images.load('foo')
    verify(images)
    # empty class - stream
    images = Images('foo', 'files/test.json',
                    config=['resize=(40,50)', 'label_key=label', 'image_key=image',
                            'gray', 'stream'])
    verify(images, streamed=True)
    # empty class - stream/load
    images = Images()
    images.load('foo')
    verify(images)
    os.remove('files/test.json')
    os.remove('foo.h5')
def test_047(self):
    """ Images - JSON - memory """
    # five in-memory images given as stringified pixel lists:
    # three entries with label 0 and two with label 1
    f = open('files/test.json', 'w')
    f.write("[\n")
    f.write('{"label": 0, "image": "[0,1,2,3,4,5,6,7]"},\n')
    f.write('{"label": 0, "image": "[10,11,12,13,14,15,16,17]"},\n')
    f.write('{"label": 0, "image": "[0,1,2,3,4,5,6,7]"},\n')
    f.write('{"label": 1, "image": "[20,21,22,23,24,25,26,27]"},\n')
    f.write('{"label": 1, "image": "[0,1,2,3,4,5,6,7]"}\n')
    f.write("]\n")
    f.close()

    def verify(imgs, streamed=False):
        # shared assertions for every load/store/stream variant
        self.assertEqual(imgs.count, 5)
        self.assertEqual(imgs.fail, 0)
        self.assertEqual(len(imgs.errors), 0)
        self.assertEqual(imgs.shape, (8, 8))
        if not streamed:
            self.assertEqual(len(imgs.images), 2)
        self.assertEqual(len(imgs.labels), 2)
        # class->index assignment is nondeterministic; accept either mapping
        if imgs.classes[0] == 0:
            self.assertEqual(imgs.classes, {0: 0, 1: 1})
            zero_index = 0
        else:
            self.assertEqual(imgs.classes, {0: 1, 1: 0})
            zero_index = 1
        # the bucket holding label 0 has 3 entries, the other has 2
        sizes = (3, 2) if imgs.labels[0][0] == zero_index else (2, 3)
        for bucket, size in enumerate(sizes):
            self.assertEqual(len(imgs.labels[bucket]), size)
            if not streamed:
                self.assertEqual(len(imgs.images[bucket]), size)
        if not streamed:
            self.assertEqual(imgs.images[0][0].shape, (8, 8))

    # multi class
    images = Images('foo', 'files/test.json',
                    config=['resize=(8,8)', 'label_key=label', 'image_key=image', 'gray'])
    verify(images)
    # multi class - store
    images = Images('foo', 'files/test.json',
                    config=['resize=(8,8)', 'label_key=label', 'image_key=image',
                            'gray', 'store'])
    verify(images)
    # multi class - store/load
    images = Images()
    images.load('foo')
    verify(images)
    # multi class - stream
    images = Images('foo', 'files/test.json',
                    config=['resize=(8,8)', 'label_key=label', 'image_key=image',
                            'gray', 'stream'])
    verify(images, streamed=True)
    # multi class - stream/load
    images = Images()
    images.load('foo')
    verify(images)
    os.remove('foo.h5')
    os.remove('files/test.json')
def test_048(self):
    """ Images - flatten """
    images = Images()
    # flatten only accepts a bool
    with pytest.raises(TypeError):
        images.flatten = 'A'
    if os.path.isdir('files/root'):
        rmtree('files/root')
    os.mkdir('files/root')
    os.mkdir('files/root/tmp1')
    copy('files/1.jpg', 'files/root/tmp1')
    copy('files/2.jpg', 'files/root/tmp1')
    # exercise flatten/unflatten for color (50*50*3) and gray (50*50) data
    for opts, flat_shape, full_shape in (
            (['resize=(50,50)'], (7500,), (50, 50, 3)),
            (['resize=(50,50)', 'gray'], (2500,), (50, 50))):
        images = Images('foo', 'files/root', config=opts)
        images.flatten = True
        self.assertEqual(images.shape, (50, 50))
        self.assertEqual(images.images[0][0].shape, flat_shape)
        # flattening twice is a no-op
        images.flatten = True
        self.assertEqual(images.shape, (50, 50))
        self.assertEqual(images.images[0][0].shape, flat_shape)
        # unflatten restores the spatial shape
        images.flatten = False
        self.assertEqual(images.shape, (50, 50))
        self.assertEqual(images.images[0][0].shape, full_shape)
    rmtree('files/root')
def test_049(self):
    """ Images - resize """
    images = Images()
    # invalid resize values raise a specific exception each
    for exc, value in ((TypeError, 'A'),
                       (AttributeError, (1,)),
                       (TypeError, (50, 'A')),
                       (ValueError, (0, 10)),
                       (ValueError, (-10, 10)),
                       (ValueError, (10, 0)),
                       (ValueError, (10, -1))):
        with pytest.raises(exc):
            images.resize = value
    if os.path.isdir('files/root'):
        rmtree('files/root')
    os.mkdir('files/root')
    os.mkdir('files/root/tmp1')
    copy('files/1.jpg', 'files/root/tmp1')
    copy('files/2.jpg', 'files/root/tmp1')
    # resize to (20,30) for every combination of color/gray and flat
    for opts, expected in ((['resize=(50,50)'], (20, 30, 3)),
                           (['resize=(50,50)', 'flat'], (20, 30, 3)),
                           (['resize=(50,50)', 'gray'], (20, 30)),
                           (['resize=(50,50)', 'flat', 'gray'], (20, 30))):
        images = Images('foo', 'files/root', config=opts)
        images.resize = (20, 30)
        self.assertEqual(images.images[0][0].shape, expected)
        self.assertEqual(images.shape, (20, 30))
    rmtree('files/root')
# Images object delivered by the asynchronous ehandler callbacks below
_async_obj = None
# seconds to wait for background Images processing to finish
SLEEP = 3
def done_048(self, obj):
    """ ehandler callback: record the asynchronously built Images object """
    self._async_obj = obj
def test_050(self):
    """ Images - async handler """

    def async_result():
        # wait for the background build, capture and clear the callback result
        time.sleep(self.SLEEP)
        self.assertTrue(self._async_obj is not None)
        obj = self._async_obj
        self._async_obj = None
        return obj

    def verify(obj, shape, streamed=False, failed=False):
        # common assertions on a built/loaded Images object
        self.assertEqual(obj.name, 'foo')
        self.assertEqual(obj.count, 2)
        self.assertEqual(obj.shape, shape)
        if not streamed:
            self.assertEqual(len(obj.images), 2)
        self.assertEqual(len(obj.labels), 2)
        if failed:
            self.assertEqual(obj.fail, 1)
            self.assertEqual(len(obj.errors), 1)

    # basic
    Images('foo', ['files/1.jpg', 'files/2.jpg'], [0, 1], ehandler=self.done_048)
    verify(async_result(), (128, 128))
    # basic - store
    Images('foo', ['files/1.jpg', 'files/2.jpg'], [0, 1],
           ehandler=self.done_048, config=['store', 'resize=50,50'])
    verify(async_result(), (50, 50))
    # basic - store/load
    images = Images()
    images.load('foo')
    verify(images, (50, 50))
    # basic - stream
    Images('foo', ['files/1.jpg', 'files/2.jpg'], [0, 1],
           ehandler=self.done_048, config=['stream', 'resize=50,50'])
    verify(async_result(), (50, 50), streamed=True)
    # basic - stream/load
    images = Images()
    images.load('foo')
    verify(images, (50, 50))
    # error: one bad path out of three, so one recorded failure
    Images('foo', ['files/1.jpg', 'bad.jpg', 'files/2.jpg'],
           [0, 1, 2], ehandler=self.done_048)
    verify(async_result(), (128, 128), failed=True)
    # error - store
    Images('foo', ['files/1.jpg', 'bad.jpg', 'files/2.jpg'],
           [0, 1, 2], ehandler=self.done_048, config=['store'])
    verify(async_result(), (128, 128), failed=True)
    # error - store/load
    images = Images()
    images.load('foo')
    verify(images, (128, 128), failed=True)
    # error - stream
    Images('foo', ['files/1.jpg', 'bad.jpg', 'files/2.jpg'],
           [0, 1, 2], ehandler=self.done_048, config=['stream'])
    verify(async_result(), (128, 128), streamed=True, failed=True)
    # error - stream/load
    images = Images()
    images.load('foo')
    verify(images, (128, 128), failed=True)
    os.remove('foo.h5')
def done_049(self, obj, arg):
    """ ehandler callback with extra arguments: record both the Images
        object and the arguments passed through by the handler """
    self._async_obj = obj
    self._async_arg = arg
def test_051(self):
    """ Images - ehandler / args """
    # an (handler, extra-arg) tuple forwards the extra args to the callback
    Images('foo', ['files/1.jpg', 'files/2.jpg'], [0, 1], ehandler=(self.done_049, 17))
    time.sleep(self.SLEEP)
    result = self._async_obj
    self.assertTrue(result is not None)
    self.assertEqual(result.name, 'foo')
    self.assertEqual(result.count, 2)
    self.assertEqual(result.shape, (128, 128))
    self.assertEqual(len(result.images), 2)
    self.assertEqual(len(result.labels), 2)
    # extra args arrive as a tuple
    self.assertEqual(self._async_arg, (17,))
    self._async_obj = None
def test_052(self):
    """ Images - dtype """
    # (config keyword, numpy dtype, whether resize is exercised);
    # TODO cv2.resize rejects 3-channel float64 (type 13, 32F C3?) and
    # float16 (type 24, 16F C3) mats, so resize is skipped for those dtypes
    cases = (('float64', np.float64, False),
             ('float32', np.float32, True),
             ('float16', np.float16, False),
             ('uint16', np.uint16, True),
             ('uint8', np.uint8, True))
    for setting, dtype, resizable in cases:
        images = Images('foo', ['files/1.jpg'], 0,
                        config=['resize=50,50', 'gray', 'flat', setting, 'store'])
        self.assertEqual(images.dtype, dtype)
        self.assertEqual(images.images[0][0].dtype, dtype)
        # dtype survives a store/load round trip
        images = Images()
        images.load('foo')
        self.assertEqual(images.dtype, dtype)
        self.assertEqual(images.images[0][0].dtype, dtype)
        # dtype survives unflatten/flatten
        images.flatten = False
        self.assertEqual(images.images[0][0].dtype, dtype)
        images.flatten = True
        self.assertEqual(images.images[0][0].dtype, dtype)
        if resizable:
            # dtype survives an in-place resize
            images.resize = (25, 25)
            self.assertEqual(images.images[0][0].dtype, dtype)
    os.remove('foo.h5')
def test_053(self):
    """ Images - image types """
    # (path, config, expect values beyond the 8-bit range) for every
    # supported format/depth combination
    cases = (('files/8gray.jpg', ['uint8'], False),
             ('files/8rgb.jpg', ['uint8'], False),
             ('files/8rgb.jp2', ['uint8'], False),
             ('files/8gray.png', ['uint8'], False),
             ('files/8rgb.png', ['uint8'], False),
             ('files/8rgba.png', ['uint8'], False),
             ('files/16rgb.png', ['uint16', '16bpp'], True),
             ('files/16rgba.png', ['uint16', '16bpp'], True),
             ('files/8gray.tiff', ['uint8'], False),
             ('files/8rgb.tiff', ['uint8'], False),
             ('files/8rgba.tiff', ['uint8'], False),
             ('files/16rgb.tiff', ['uint16'], False),
             ('files/16rgba.tiff', ['uint16'], False),
             ('files/8rgb.gif', ['uint8'], False),
             ('files/8rgba.gif', ['uint8'], False),
             ('files/16rgb.gif', ['uint8'], False),
             ('files/16rgba.gif', ['uint8'], False),
             ('files/8gray.bmp', ['uint8'], False),
             ('files/8rgb.bmp', ['uint8'], False),
             ('files/16rgb.bmp', ['uint8'], False))
    for path, config, sixteen_bit in cases:
        images = Images('foo', [path], 0, config=config)
        self.assertEqual(images.count, 1)
        if sixteen_bit:
            # genuine 16bpp data should exceed the 8-bit value range
            self.assertTrue(images.images[0][0].max() > 256)
        else:
            self.assertTrue(images.images[0][0].max() < 256)
def test_054(self):
    """ Images - image types """
    # gif - grayscale: an RGB GIF loaded with a grayscale config still counts
    images = Images('foo', ['files/8rgb.gif'], 0, config=['grayscale'])
    self.assertEqual(images.count, 1)
    # gray tiff - color: a 16bpp grayscale TIFF loaded as color
    images = Images('foo', ['files/16gray.tif'], 0, config=['16bpp'])
    self.assertEqual(images.count, 1)
    # 2 channels - color: a two-channel 16-bit BMP
    images = Images('foo', ['files/16_2ch.bmp'], 0, config=['grayscale', '16bpp'])
    self.assertEqual(images.count, 1)
    # unsupported format: a .raw file cannot be decoded, so nothing is loaded
    images = Images('foo', ['files/16mch.raw'], 0)
    self.assertEqual(images.count, 0)
def test_055(self):
    """ Images - config setting: normalize """
    # 8bpp: spot-check the first flattened pixel under each normalization mode
    a = cv2.imread('files/1.jpg')
    images = Images('foo', [a], 0, config=['gray', 'flat', 'norm=pos'])
    self.assertEqual("%.3f" % images.images[0][0][0], "0.420")
    images = Images('foo', [a], 0, config=['gray', 'flat', 'norm=zero'])
    self.assertEqual("%.3f" % images.images[0][0][0], "-0.161")
    images = Images('foo', [a], 0, config=['gray', 'flat', 'norm=std'])
    self.assertEqual("%.3f" % images.images[0][0][0], "-0.062")
    # 16bpp: same three modes against a 16-bit source image
    images = Images('foo', ['files/16rgb.png'], 0,
                    config=['gray', 'flat', 'norm=pos', '16bpp'])
    self.assertEqual("%.3f" % images.images[0][0][0], "0.116")
    images = Images('foo', ['files/16rgb.png'], 0,
                    config=['gray', 'flat', 'norm=zero', '16bpp'])
    self.assertEqual("%.3f" % images.images[0][0][0], "-0.767")
    images = Images('foo', ['files/16rgb.png'], 0, config=['gray', 'flat', 'norm=std', '16bpp'])
    self.assertEqual("%.3f" % images.images[0][0][0], "-2.156")
def test_056(self):
    """ Images - property gray """
    images = Images('foo', ['files/1.jpg', 'files/2.jpg'], 0,
                    config=['resize=(50,50)'])
    # Toggling the gray property: False is a no-op on color data, True
    # collapses the channel axis, and setting True twice is idempotent.
    steps = [(None, (2, 50, 50, 3)),
             (False, (2, 50, 50, 3)),
             (True, (2, 50, 50)),
             (True, (2, 50, 50))]
    for value, expected_shape in steps:
        if value is not None:
            images.gray = value
        self.assertEqual(images.images[0].shape, expected_shape)
def test_057(self):
    """ Images - split - setter - bad arguments """
    images = Images()
    # Each invalid assignment must raise the indicated exception type.
    bad_values = [(TypeError, 'A'),        # not a number
                  (ValueError, 12.6),      # out of range
                  (ValueError, 1.0),       # ratio must be < 1.0
                  (TypeError, (0.6, 'a'))] # seed must be an int
    for exc, value in bad_values:
        with pytest.raises(exc):
            images.split = value
def test_058(self):
    """ Images - split - single class - setter """
    # 50/50 split of two samples, persisted with 'store'
    images = Images('foo', ['files/1.jpg', 'files/2.jpg'], 0, config=['store'])
    images.split = 0.5, 12
    self.assertEqual(len(images._train), 1)
    self.assertEqual(len(images._test), 1)
    # 25% test split of four samples: 3 train / 1 test
    images = Images('foo', ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'], 0)
    images.split = 0.25, 12
    self.assertEqual(len(images._train), 3)
    self.assertEqual(len(images._test), 1)
    # load: reloading the stored collection gives the same split behavior
    images = Images()
    images.load('foo')
    images.split = 0.5, 12
    self.assertEqual(len(images._train), 1)
    self.assertEqual(len(images._test), 1)
    images = Images('foo', ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'], 0)
    images.split = 0.25, 12
    self.assertEqual(len(images._train), 3)
    self.assertEqual(len(images._test), 1)
    # clean up the stored HDF5 dataset
    os.remove('foo.h5')
def test_059(self):
    """ Images - split - multi class - setter """
    # two balanced classes of four samples each, persisted with 'store'
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/3.jpg', 'files/1.jpg', 'files/2.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1], config=['store'])
    # 25% test split: 6 training / 2 test samples
    images.split = 0.25, 12
    self.assertEqual(len(images._train), 6)
    self.assertEqual(len(images._test), 2)
    # load: same split counts after reloading from the stored HDF5
    images = Images()
    images.load('foo')
    images.split = 0.25, 12
    self.assertEqual(len(images._train), 6)
    self.assertEqual(len(images._test), 2)
    os.remove('foo.h5')
def test_060(self):
    """ Images - split - single class - getter """
    images = Images('foo', ['files/1.jpg', 'files/2.jpg'], 0, config=['store'])
    images.split = 0.5, 12
    # getter returns (X_train, X_test, Y_train, Y_test)
    X_train, X_test, Y_train, Y_test = images.split
    self.assertEqual(X_train.shape, (1, 128, 128, 3))
    self.assertEqual(X_test.shape, (1, 128, 128, 3))
    # single class => every label is the one-hot [1]
    self.assertEqual(Y_train[0][0], [1])
    self.assertEqual(Y_test[0][0], [1])
    self.assertEqual(type(Y_train[0][0]), np.uint8)
    self.assertEqual(type(Y_test[0][0]), np.uint8)
    self.assertEqual(images.classes, {'0': 0})
    # load: reloading the stored collection behaves identically
    images = Images()
    images.load('foo')
    images.split = 0.5, 12
    X_train, X_test, Y_train, Y_test = images.split
    self.assertEqual(X_train.shape, (1, 128, 128, 3))
    self.assertEqual(X_test.shape, (1, 128, 128, 3))
    self.assertEqual(Y_train[0][0], [1])
    self.assertEqual(Y_test[0][0], [1])
    self.assertEqual(images.classes, {'0': 0})
    os.remove('foo.h5')
def test_061(self):
    """ Images - split - multi class - getter """
    # two balanced classes of four samples each, persisted with 'store'
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/3.jpg', 'files/1.jpg', 'files/2.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1], config=['store'])
    images.split = 0.25, 12
    X_train, X_test, Y_train, Y_test = images.split
    self.assertEqual(X_train.shape, (6, 128, 128, 3))
    self.assertEqual(X_test.shape, (2, 128, 128, 3))
    # two classes => one-hot labels of width 2
    self.assertEqual(Y_train.shape, (6, 2))
    self.assertEqual(Y_test.shape, (2, 2))
    self.assertEqual(Y_train[0][0], 1)
    self.assertEqual(Y_train[0][1], 0)
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    # load: reloading the stored collection behaves identically
    images = Images()
    images.load('foo')
    images.split = 0.25, 12
    X_train, X_test, Y_train, Y_test = images.split
    self.assertEqual(X_train.shape, (6, 128, 128, 3))
    self.assertEqual(X_test.shape, (2, 128, 128, 3))
    self.assertEqual(Y_train.shape, (6, 2))
    self.assertEqual(Y_test.shape, (2, 2))
    self.assertEqual(Y_train[0][0], 1)
    self.assertEqual(Y_train[0][1], 0)
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    os.remove('foo.h5')
def test_062(self):
    """ Images - split - single class - getter, class != 0 """
    # a non-zero class label is still internally remapped to index 0
    images = Images('foo', ['files/1.jpg', 'files/2.jpg'], 1, config=['store'])
    images.split = 0.5, 12
    X_train, X_test, Y_train, Y_test = images.split
    self.assertEqual(X_train.shape, (1, 128, 128, 3))
    self.assertEqual(X_test.shape, (1, 128, 128, 3))
    self.assertEqual(Y_train[0][0], 1)
    self.assertEqual(Y_test[0][0], 1)
    self.assertEqual(images.classes, {'1': 0})
    # load: reloading the stored collection behaves identically
    images = Images()
    images.load('foo')
    images.split = 0.5, 12
    X_train, X_test, Y_train, Y_test = images.split
    self.assertEqual(X_train.shape, (1, 128, 128, 3))
    self.assertEqual(X_test.shape, (1, 128, 128, 3))
    self.assertEqual(Y_train[0][0], 1)
    self.assertEqual(Y_test[0][0], 1)
    self.assertEqual(images.classes, {'1': 0})
    # Clean up the stored HDF5 dataset. This was missing: the test used
    # config=['store'] but never removed foo.h5, unlike every sibling test,
    # leaking the file into subsequent test runs.
    os.remove('foo.h5')
def test_063(self):
    """ Images - split - single string class - setter - string """
    # string class labels are supported just like integer labels
    images = Images('foo', ['files/1.jpg', 'files/2.jpg'], 'cat', config=['store'])
    images.split = 0.5, 12
    self.assertEqual(len(images._train), 1)
    self.assertEqual(len(images._test), 1)
    # load: reloading the stored collection behaves identically
    images = Images()
    images.load('foo')
    images.split = 0.5, 12
    self.assertEqual(len(images._train), 1)
    self.assertEqual(len(images._test), 1)
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'],
                    'cat')
    images.split = 0.25, 12
    self.assertEqual(len(images._train), 3)
    self.assertEqual(len(images._test), 1)
    self.assertEqual(images.classes, {'cat': 0})
    # NOTE(review): the block below repeats the one above verbatim —
    # possibly a copy/paste remnant; confirm whether a variation was intended
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'],
                    'cat')
    images.split = 0.25, 12
    self.assertEqual(len(images._train), 3)
    self.assertEqual(len(images._test), 1)
    self.assertEqual(images.classes, {'cat': 0})
    # clean up the stored HDF5 dataset
    os.remove('foo.h5')
def test_064(self):
    """ Images - split - multi class - setter - string """
    # two balanced string classes of four samples each, persisted with 'store'
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/3.jpg', 'files/1.jpg', 'files/2.jpg'],
                    ['cat', 'cat', 'cat', 'cat', 'dog', 'dog', 'dog', 'dog'],
                    config=['store'])
    images.split = 0.25, 12
    self.assertEqual(len(images._train), 6)
    self.assertEqual(len(images._test), 2)
    # load: same split counts after reloading from the stored HDF5
    images = Images()
    images.load('foo')
    images.split = 0.25, 12
    self.assertEqual(len(images._train), 6)
    self.assertEqual(len(images._test), 2)
    os.remove('foo.h5')
def test_065(self):
    """ Images - split - single class - getter - string """
    images = Images('foo', ['files/1.jpg', 'files/2.jpg'], 'cat', config=['store'])
    images.split = 0.5, 12
    X_train, X_test, Y_train, Y_test = images.split
    self.assertEqual(X_train.shape, (1, 128, 128, 3))
    self.assertEqual(X_test.shape, (1, 128, 128, 3))
    # single string class => one-hot [1] for every sample
    self.assertEqual(Y_train[0], [1])
    self.assertEqual(Y_test[0], [1])
    self.assertEqual(images.classes, {'cat': 0})
    # load: reloading the stored collection behaves identically
    images = Images()
    images.load('foo')
    images.split = 0.5, 12
    X_train, X_test, Y_train, Y_test = images.split
    self.assertEqual(X_train.shape, (1, 128, 128, 3))
    self.assertEqual(X_test.shape, (1, 128, 128, 3))
    self.assertEqual(Y_train[0], [1])
    self.assertEqual(Y_test[0], [1])
    self.assertEqual(images.classes, {'cat': 0})
    os.remove('foo.h5')
def test_066(self):
    """ Images - split - multi class - getter - string """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/3.jpg', 'files/1.jpg', 'files/2.jpg'],
                    ['cat', 'cat', 'cat', 'cat', 'dog', 'dog', 'dog', 'dog'],
                    config=['store'])
    images.split = 0.25, 12
    X_train, X_test, Y_train, Y_test = images.split
    self.assertEqual(X_train.shape, (6, 128, 128, 3))
    self.assertEqual(X_test.shape, (2, 128, 128, 3))
    self.assertEqual(Y_train.shape, (6, 2))
    self.assertEqual(Y_test.shape, (2, 2))
    self.assertEqual(Y_train[0][0], 1)
    self.assertEqual(Y_train[0][1], 0)
    # string class => index assignment order is not guaranteed, so accept
    # either mapping as long as it is internally consistent
    if images.classes['cat'] == 1:
        self.assertEqual(images.classes, {'cat': 1, 'dog': 0})
    else:
        self.assertEqual(images.classes, {'cat': 0, 'dog': 1})
    # load: reloading the stored collection behaves identically
    images = Images()
    images.load('foo')
    images.split = 0.25, 12
    X_train, X_test, Y_train, Y_test = images.split
    self.assertEqual(X_train.shape, (6, 128, 128, 3))
    self.assertEqual(X_test.shape, (2, 128, 128, 3))
    self.assertEqual(Y_train.shape, (6, 2))
    self.assertEqual(Y_test.shape, (2, 2))
    # TODO: not sure if error is due to bug if just random shuffle
    self.assertEqual(Y_train[0][0], 1)
    self.assertEqual(Y_train[0][1], 0)
    if images.classes['cat'] == 1:
        self.assertEqual(images.classes, {'cat': 1, 'dog': 0})
    else:
        self.assertEqual(images.classes, {'cat': 0, 'dog': 1})
    os.remove('foo.h5')
def test_067(self):
    """ Images - split, getter only -- """
    # Reading the split without first assigning one uses the default ratio.
    ds = Images('foo',
                ['files/1.jpg', 'files/2.jpg', 'files/3.jpg',
                 'files/8rgb.jpg', 'files/8rgb.png'],
                'cat')
    X_train, X_test, Y_train, Y_test = ds.split
    self.assertEqual(X_train.shape, (4, 128, 128, 3))
    self.assertEqual(X_test.shape, (1, 128, 128, 3))
    self.assertEqual(Y_train[0], [1])
    self.assertEqual(Y_test[0], [1])
    self.assertEqual(ds.classes, {'cat': 0})
    # Same getter-only access on a two-class integer labeling.
    ds = Images('foo',
                ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                 'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                [0, 0, 0, 0, 0, 0, 1, 1])
    X_train, X_test, Y_train, Y_test = ds.split
    for array, shape in [(X_train, (5, 128, 128, 3)),
                         (X_test, (3, 128, 128, 3)),
                         (Y_train, (5, 2)),
                         (Y_test, (3, 2))]:
        self.assertEqual(array.shape, shape)
    self.assertEqual(Y_train.dtype, np.uint8)
    self.assertEqual(Y_test.dtype, np.uint8)
def test_068(self):
    """ Images - split 100% training """
    ds = Images('foo',
                ['files/1.jpg', 'files/2.jpg', 'files/3.jpg',
                 'files/8rgb.jpg', 'files/8rgb.png'],
                'cat')
    # A 0.0 test ratio keeps every sample in the training partition and
    # leaves the test partition empty (None).
    ds.split = 0.0, 12
    X_train, X_test, Y_train, Y_test = ds.split
    self.assertEqual(X_train.shape, (5, 128, 128, 3))
    self.assertEqual(X_test, None)
    self.assertEqual(Y_train.shape, (5, 1))
    self.assertEqual(Y_test, None)
def test_069(self):
    """ Images - uneven split -- """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 0, 0, 1, 1])
    # 80% test ratio on 8 samples with uneven class sizes (6 vs 2)
    images.split = 0.8, 12
    X_train, X_test, Y_train, Y_test = images.split
    self.assertEqual(X_train.shape, (2, 128, 128, 3))
    self.assertEqual(X_test.shape, (6, 128, 128, 3))
    self.assertEqual(Y_train.shape, (2, 2))
    self.assertEqual(Y_test.shape, (6, 2))
def test_070(self):
    """ Images - split - uint8, then normalize -- """
    paths = ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
             'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg']
    labels = [0, 0, 0, 0, 0, 0, 1, 1]
    # Integer-typed collections keep their dtype through the split setter,
    # but the split getter hands back float32 (normalized) arrays.
    for setting, stored_dtype in (('uint8', np.uint8), ('uint16', np.uint16)):
        ds = Images('foo', paths, labels, config=[setting])
        self.assertEqual(ds.dtype, stored_dtype)
        ds.split = 0.5, 12
        self.assertEqual(ds.dtype, stored_dtype)
        X_train, X_test, Y_train, Y_test = ds.split
        self.assertEqual(X_train.dtype, np.float32)
        self.assertEqual(X_test.dtype, np.float32)
def test_071(self):
    """ images - split, shuffle order -- """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1])
    # seed 12 makes the shuffled order deterministic; each _train/_test
    # entry is a (class index, image index) pair
    images.split = 0.5, 12
    self.assertEqual(len(images._train), 4)
    self.assertEqual(images._train[0], (0, 0))
    self.assertEqual(images._train[1], (1, 0))
    self.assertEqual(images._train[2], (0, 2))
    self.assertEqual(images._train[3], (1, 2))
    self.assertEqual(len(images._test), 4)
    self.assertEqual(images._test[0], (0, 1))
    self.assertEqual(images._test[1], (0, 3))
    self.assertEqual(images._test[2], (1, 3))
    self.assertEqual(images._test[3], (1, 1))
def test_072(self):
    """ images - split, more shuffle order -- """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 0, 0, 1, 1])
    images.split = 0.25, 12
    X_train, X_test, Y_train, Y_test = images.split
    self.assertEqual(len(images._train), 5)
    self.assertEqual(len(X_train), 5)
    self.assertEqual(len(X_test), 3)
    self.assertEqual(len(Y_train), 5)
    self.assertEqual(len(Y_test), 3)
    # Each _train entry starts with the label index; the one-hot vector at
    # that position must be set. (Was a `for _ in range(len(...))` loop that
    # used `_` as a meaningful index — replaced with idiomatic enumerate.)
    for i, entry in enumerate(images._train):
        self.assertEqual(Y_train[i][entry[0]], 1)
def test_073(self):
    """ Images - split / next -- no image data """
    # Every split/next access on an empty collection raises AttributeError;
    # a fresh instance is used for each probe.
    empty = Images()
    with pytest.raises(AttributeError):
        empty.split = 0.5
    empty = Images()
    with pytest.raises(AttributeError):
        x, x, x, x = empty.split
    empty = Images()
    with pytest.raises(AttributeError):
        next(empty)
def test_074(self):
    """ Images -next() operator """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'],
                    [0, 0, 1, 1], config=['float16'])
    images.split = 0.5, 12
    # loop thru the 2-sample training partition twice; the end of an epoch
    # is signalled with a (None, None) pair
    for _ in range(2):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
        # float16 config is preserved through next()
        self.assertEqual(x.dtype, np.float16)
    self.assertEqual(next(images), (None, None))
    for _ in range(2):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
        self.assertEqual(x.dtype, np.float16)
    # load: store the collection, reload it, and repeat the same sequence
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'],
                    [0, 0, 1, 1], config=['float16', 'store'])
    images = Images()
    images.load('foo')
    images.split = 0.5, 12
    # loop thru list twice
    for _ in range(2):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
        self.assertEqual(x.dtype, np.float16)
    self.assertEqual(next(images), (None, None))
    for _ in range(2):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
        self.assertEqual(x.dtype, np.float16)
    os.remove('foo.h5')
def test_075(self):
    """ Images -next() operator / normalize """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'],
                    [0, 0, 1, 1], config=['uint8'])
    images.split = 0.5, 12
    # Two epochs over the 2-sample training partition; uint8 storage is
    # normalized to float32 on the fly by next().
    for epoch in range(2):
        for _ in range(2):
            sample, label = next(images)
            self.assertEqual(sample.shape, (128, 128, 3))
            self.assertEqual(label.shape, (2,))
            self.assertEqual(sample.dtype, np.float32)
        if epoch == 0:
            # end of the first epoch is signalled with (None, None)
            self.assertEqual(next(images), (None, None))
def test_076(self):
    """ Images -next() operator / implicit split """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'],
                    [0, 0, 1, 1])
    # No explicit split: next() implicitly uses the default partitioning.
    for epoch in range(2):
        for _ in range(2):
            sample, label = next(images)
            self.assertEqual(sample.shape, (128, 128, 3))
            self.assertEqual(label.shape, (2,))
            self.assertEqual(sample.dtype, np.float32)
        if epoch == 0:
            # end of the first epoch is signalled with (None, None)
            self.assertEqual(next(images), (None, None))
def test_077(self):
    """ Images - next() - shuffle order -- """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1])
    images.split = 0.5, 12
    # Seeded shuffle: one-hot labels come back in this exact order.
    for expected in ([1, 0], [0, 1], [1, 0], [0, 1]):
        x, y = next(images)
        self.assertEqual(list(y), expected)
    # end-of-epoch (None, None) marker
    x, y = next(images)
    # The next epoch is reshuffled into a new deterministic order.
    for expected in ([0, 1], [1, 0], [1, 0], [0, 1]):
        x, y = next(images)
        self.assertEqual(list(y), expected)
def test_078(self):
    """ Images - minibatch - bad args """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'],
                    [0, 0, 1, 1])
    # invalid batch sizes on a populated collection
    for exc, value in [(TypeError, 'a'), (ValueError, 0)]:
        with pytest.raises(exc):
            images.minibatch = value
    # setting a batch size with no image data at all
    images = Images()
    with pytest.raises(AttributeError):
        images.minibatch = 2
def test_079(self):
    """ Images - minibatch - setter """
    # explicit split: batch size is recorded and the training partition
    # reflects the 50% split (4 of 8)
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 0, 0, 1, 1])
    images.split = 0.5, 12
    images.minibatch = 2
    self.assertEqual(images._minisz, 2)
    self.assertEqual(images._trainsz, 4)
    # implicit split: with no explicit split the training size falls out of
    # the default partitioning (5 of 8)
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 0, 0, 1, 1])
    images.minibatch = 2
    self.assertEqual(images._minisz, 2)
    self.assertEqual(images._trainsz, 5)
def test_080(self):
    """ Images - minibatch - getter """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1])
    images.split = 0.5, 12
    images.minibatch = 2
    g = images.minibatch
    # Expected one-hot labels per batch for the seeded shuffle; step 3 is
    # the first batch of the next epoch. (Removed a dead `if True:` wrapper
    # and a hand-rolled step counter in favor of enumerate.)
    expected = {1: ([1, 0], [0, 1]),
                2: ([1, 0], [0, 1]),
                3: ([0, 1], [1, 0])}
    for step, (x_batch, y_batch) in enumerate(g, start=1):
        self.assertEqual(x_batch.shape, (2, 128, 128, 3))
        self.assertEqual(y_batch.shape, (2, 2))
        if step > 3:
            break
        for label, want in zip(y_batch, expected[step]):
            self.assertEqual(list(label), want)
def test_081(self):
    """ stream file csv with urls paths """
    url = 'https://raw.githubusercontent.com/gapml/CV/master/tests/files/fp_urls.csv'
    images = Images('foo', url,
                    config=['label_col=1', 'image_col=0', 'resize=(50,50)', 'header'])
    # NOTE(review): the count guard skips the assertions when fewer than the
    # expected 25 remote images were fetched — presumably to tolerate flaky
    # network access; confirm intent
    if images.count == 25:
        self.assertEqual(images.count, 25)
        self.assertEqual(images.fail, 0)
        self.assertEqual(len(images.errors), 0)
        self.assertEqual(len(images.images), 5)
        self.assertEqual(len(images.labels), 5)
        self.assertEqual(images.shape, (50, 50))
        self.assertEqual(images.classes, {'daisy': 0,
                                          'dandelion': 1,
                                          'roses': 2,
                                          'sunflowers': 3,
                                          'tulips': 4})
    if images.count == 25:
        self.assertEqual(len(images.labels[0]), 5)
        self.assertEqual(len(images.images[0]), 5)
        self.assertEqual(images.images[0][0].shape, (50, 50, 3))
    # a non-existent URL must raise an OSError
    bad_url = 'https://raw.githubusercontent.com/gapml/CV/master/tests/file/fp_urls.csv'
    with pytest.raises(OSError):
        images = Images('foo', bad_url,
                        config=['label_col=1', 'image_col=0',
                                'resize=(50,50)', 'header'])
def test_082(self):
    """ stream file json with urls paths """
    url = 'https://raw.githubusercontent.com/gapml/CV/master/tests/files/fp_urls.json'
    images = Images('foo', url,
                    config=['label_key=image_label', 'image_key=image_key', 'resize=(50,50)'])
    # NOTE(review): the count guard skips the assertions when fewer than the
    # expected 25 remote images were fetched — presumably to tolerate flaky
    # network access; confirm intent
    if images.count == 25:
        self.assertEqual(images.count, 25)
        self.assertEqual(images.fail, 0)
        self.assertEqual(len(images.errors), 0)
        self.assertEqual(len(images.images), 5)
        self.assertEqual(len(images.labels), 5)
        self.assertEqual(images.shape, (50, 50))
        self.assertEqual(images.classes, {'daisy': 0,
                                          'dandelion': 1,
                                          'roses': 2,
                                          'sunflowers': 3,
                                          'tulips': 4})
    if images.count == 25:
        self.assertEqual(len(images.labels[0]), 5)
        self.assertEqual(len(images.images[0]), 5)
        self.assertEqual(images.images[0][0].shape, (50, 50, 3))
    # a non-existent URL must raise an OSError
    bad_url = 'https://raw.githubusercontent.com/gapml/CV/master/tests/file/fp_urls.json'
    with pytest.raises(OSError):
        images = Images('foo', bad_url,
                        config=['label_key=image_label',
                                'image_key=image_key',
                                'resize=(50,50)'])
def test_083(self):
    """ Images - stratify - bad args """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'],
                    [0, 0, 1, 1])
    # (expected exception, assigned value) in the original probe order
    cases = [(AttributeError, (2, 0.5, 6, 6)),  # too many elements
             (AttributeError, 'a'),             # not numeric
             (TypeError, (2, 'a')),             # ratio must be a float
             (TypeError, (2, 0.6, 'a')),        # seed must be an int
             (ValueError, 0),                   # batch size out of range
             (ValueError, (2, 1.0)),            # ratio must be < 1.0
             (ValueError, (1, 0.5))]            # batch size too small
    for exc, value in cases:
        with pytest.raises(exc):
            images.stratify = value
    # stratify with no image data at all
    images = Images()
    with pytest.raises(AttributeError):
        images.stratify = 1, 0.5
def test_084(self):
    """ Images - stratify - setter - batch size """
    paths = ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
             'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg']
    labels = [0, 0, 0, 0, 1, 1, 1, 1]
    # batch size alone, with a split ratio, and with a seed: each form
    # records the batch size in _minisz
    for setting in (2, (2, 0.5), (2, 0.5, 12)):
        images = Images('foo', paths, labels)
        images.stratify = setting
        self.assertEqual(images._minisz, 2)
def test_085(self):
    """ Images - stratify - setter - getter """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1])
    images.stratify = 2, 0.5, 12
    g = images.stratify
    # Every stratified batch alternates the two classes in the same order.
    # (Removed a dead `if True:` wrapper and a hand-rolled step counter in
    # favor of enumerate.)
    for step, (x_batch, y_batch) in enumerate(g, start=1):
        self.assertEqual(x_batch.shape, (2, 128, 128, 3))
        self.assertEqual(y_batch.shape, (2, 2))
        if step > 3:
            break
        self.assertEqual(list(y_batch[0]), [1, 0])
        self.assertEqual(list(y_batch[1]), [0, 1])
def test_086(self):
    """ Images - += Images - invalid """
    base = Images('foo', ['files/1.jpg', 'files/2.jpg'], 0,
                  config=['resize=(50,50)'])
    # only another Images instance may be added
    with pytest.raises(TypeError):
        base += 1
    # incompatible collections: differing size, color space, or dtype
    incompatible = [(['resize=(50,50)'], ['resize=(40,50)']),
                    (['resize=(50,50)', 'gray'], ['resize=(50,50)']),
                    (['resize=(50,50)', 'uint8'], ['resize=(50,50)'])]
    for cfg_a, cfg_b in incompatible:
        first = Images('foo', ['files/1.jpg', 'files/2.jpg'], 0, config=cfg_a)
        second = Images('foo', ['files/3.jpg', 'files/8rgb.jpg'], 0, config=cfg_b)
        with pytest.raises(AttributeError):
            first += second
def test_087(self):
    """ Images - += Images - same single class """
    base = Images('foo', ['files/1.jpg', 'files/2.jpg'], 0,
                  config=['resize=(50,50)'])
    extra = Images('foo', ['files/1.jpg', 'files/8rgb.jpg', 'nonexist.jpg'], 0,
                   config=['resize=(50,50)'])
    base += extra
    # the merged collection keeps the single class and carries the failed
    # load ('nonexist.jpg') forward
    self.assertEqual(base.count, 4)
    self.assertEqual(base.fail, 1)
    self.assertEqual(base.classes, {'0': 0})
    self.assertEqual(len(base), 1)
    self.assertEqual(base.images[0].shape, (4, 50, 50, 3))
    self.assertEqual(base.labels[0].shape, (4, ))
def test_088(self):
    """ Images - += Images - different class """
    base = Images('foo', ['files/1.jpg', 'files/2.jpg'], 0,
                  config=['resize=(50,50)'])
    extra = Images('foo', ['files/1.jpg', 'files/8rgb.jpg', 'nonexist.jpg'], 1,
                   config=['resize=(50,50)'])
    self.assertEqual(extra.classes, {'1': 0})
    base += extra
    # the merged collection now holds two classes and one failed load
    self.assertEqual(base.count, 4)
    self.assertEqual(base.fail, 1)
    self.assertEqual(base.classes, {'0': 0, '1': 1})
    self.assertEqual(len(base), 2)
    # per-class partitions: two images each, labeled 0 and 1 respectively
    for idx, label in [(0, 0), (1, 1)]:
        self.assertEqual(base.images[idx].shape, (2, 50, 50, 3))
        self.assertEqual(base.labels[idx].shape, (2, ))
        self.assertEqual(base.labels[idx][0], label)
def test_089(self):
    """ Images - += Images - different metadata """
    first = Images('foo', ['files/1.jpg', 'files/2.jpg'], 0,
                   config=['resize=(50,50)', 'author=sam', 'src=x', 'desc=aa', 'license=c0'])
    second = Images('foo', ['files/1.jpg', 'files/8rgb.jpg'], 0,
                    config=['resize=(50,50)', 'author=sue', 'src=y', 'desc=bb', 'license=c1'])
    first += second
    # metadata fields from both collections are merged as comma-joined strings
    for attr, merged in [('author', 'sam,sue'), ('src', 'x,y'),
                         ('desc', 'aa,bb'), ('license', 'c0,c1')]:
        self.assertEqual(getattr(first, attr), merged)
def test_090(self):
    """ Images Constructor - no images, augment argument """
    # (augment value, expected exception or None) — original probe order
    # is preserved exactly.
    cases = [
        (None, None), ([], None), (['edge'], None), (['denoise'], None),
        (7, TypeError), ('7', TypeError),
        ([7], AttributeError), (['foo'], AttributeError),
        (['zoom=1.5'], None),
        (['zoom='], AttributeError), (['zoom=-1'], AttributeError),
        (['zoom=abc'], AttributeError),
        (['flip=horizontal'], None), (['flip=vertical'], None),
        (['flip=both'], None),
        (['flip=a'], AttributeError), (['flip=1'], AttributeError),
        (['rotate=-90,90'], None),
        (['rotate='], AttributeError), (['rotate=1'], AttributeError),
        (['rotate=2,a'], AttributeError), (['rotate=0,360'], AttributeError),
        (['rotate=-360,0'], AttributeError),
        (['contrast=2.0'], None), (['brightness=50'], None),
        (['brightness=50', 'contrast=2.0'], None),
        (['contrast=2'], None), (['brightness=50.5'], None),
        (['brightness=50.5', 'contrast=2'], None),
        (['contrast='], AttributeError), (['contrast=-1.0'], AttributeError),
        (['contrast=4.0'], AttributeError), (['contrast=abc'], AttributeError),
        (['brightness='], AttributeError), (['brightness=-1'], AttributeError),
        (['brightness=101'], AttributeError), (['brightness=abc'], AttributeError),
    ]
    for augment, exc in cases:
        if exc is None:
            images = Images(augment=augment)
        else:
            with pytest.raises(exc):
                images = Images(augment=augment)
def test_091(self):
    """ Images -next() operator with augmentation """
    # Each augmentation spec is exercised identically: split, then two full
    # epochs of 4 next() samples each, with a (None, None) end-of-epoch
    # marker after each epoch.
    specs = [['flip=horizontal'],
             ['flip=vertical'],
             ['flip=both'],
             ['zoom=1.5'],
             ['rotate=-30,60'],
             ['rotate=-30,60', 'flip=vertical', 'zoom=2']]
    for augment in specs:
        images = Images('foo',
                        ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'],
                        [0, 0, 1, 1], augment=augment)
        images.split = 0.5, 12
        for epoch in range(2):
            # augmentation doubles the stream: 2 originals + 2 augmented
            for _ in range(4):
                x, y = next(images)
                self.assertEqual(x.shape, (128, 128, 3))
                self.assertEqual(y.shape, (2,))
            self.assertEqual(next(images), (None, None))
def test_092(self):
    """ Images - augmentation - minibatch """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1], augment=['flip=vertical'])
    images.split = 0.5, 12
    images.minibatch = 2
    g = images.minibatch
    # Expected one-hot labels per batch for the seeded shuffle; step 3 is
    # the first batch of the next epoch. (Removed a dead `if True:` wrapper
    # and a hand-rolled step counter in favor of enumerate.)
    expected = {1: ([1, 0], [1, 0]),
                2: ([0, 1], [0, 1]),
                3: ([1, 0], [1, 0])}
    for step, (x_batch, y_batch) in enumerate(g, start=1):
        self.assertEqual(x_batch.shape, (2, 128, 128, 3))
        self.assertEqual(y_batch.shape, (2, 2))
        if step > 3:
            break
        for label, want in zip(y_batch, expected[step]):
            self.assertEqual(list(label), want)
def test_093(self):
    """ Images - stratify - augmentation

    Stratified generator with flip=both augmentation; each batch of 4 is
    expected to hold two class-0 followed by two class-1 samples.
    """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1], augment=['flip=both'])
    # assumes stratify is (per-class batch size, split ratio, seed)
    # -- TODO confirm tuple semantics
    images.stratify = 2, 0.5, 12
    g = images.stratify
    if True:
        step = 0
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (4, 128, 128, 3))
            self.assertEqual(y_batch.shape, (4, 2))
            step += 1
            # first batch
            if step == 1:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
                self.assertEqual(list(y_batch[2]), [0, 1])
                self.assertEqual(list(y_batch[3]), [0, 1])
            # second batch
            elif step == 2:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
                self.assertEqual(list(y_batch[2]), [0, 1])
                self.assertEqual(list(y_batch[3]), [0, 1])
            # next epoch
            elif step == 3:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
                self.assertEqual(list(y_batch[2]), [0, 1])
                self.assertEqual(list(y_batch[3]), [0, 1])
            else:
                break
def test_094(self):
    """ Images - augmentation - minibatch - uint8

    Same scenario as test_092 but with config=['uint8'] on the stored data.
    """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1], config=['uint8'], augment=['flip=vertical'])
    images.split = 0.5, 12
    images.minibatch = 2
    g = images.minibatch
    if True:
        step = 0
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (2, 128, 128, 3))
            self.assertEqual(y_batch.shape, (2, 2))
            # NOTE(review): dtype asserted as float32 even though config is
            # 'uint8' -- presumably the minibatch/augmentation path
            # normalizes to float; confirm this is intended.
            self.assertEqual(x_batch.dtype, np.float32)
            step += 1
            # first batch
            if step == 1:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
            # second batch
            elif step == 2:
                self.assertEqual(list(y_batch[0]), [0, 1])
                self.assertEqual(list(y_batch[1]), [0, 1])
            # next epoch
            elif step == 3:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
            else:
                break
def test_095(self):
    """ Images - stratify - augmentation - uint16

    Same scenario as test_093 but with config=['uint16'] on the stored data.
    """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1], augment=['flip=both'], config=['uint16'])
    images.stratify = 2, 0.5, 12
    g = images.stratify
    if True:
        step = 0
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (4, 128, 128, 3))
            self.assertEqual(y_batch.shape, (4, 2))
            # NOTE(review): float32 asserted despite 'uint16' config --
            # presumably the generator path normalizes; confirm intended.
            self.assertEqual(x_batch.dtype, np.float32)
            step += 1
            # first batch
            if step == 1:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
                self.assertEqual(list(y_batch[2]), [0, 1])
                self.assertEqual(list(y_batch[3]), [0, 1])
            # second batch
            elif step == 2:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
                self.assertEqual(list(y_batch[2]), [0, 1])
                self.assertEqual(list(y_batch[3]), [0, 1])
            # next epoch
            elif step == 3:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
                self.assertEqual(list(y_batch[2]), [0, 1])
                self.assertEqual(list(y_batch[3]), [0, 1])
            else:
                break
def test_096(self):
    """ Images - augmentation - minibatch - uint8

    CSV-driven collection (image/label columns, header row) with a full
    stack of augmentations; only the first minibatch is checked for shape
    and dtype, then the loop exits.
    """
    images = Images('foo', 'files/fp_urls.csv',
                    config=['uint8',
                            'resize=(50,50)',
                            'label_col=1',
                            'image_col=0',
                            'header'],
                    augment=['flip=both',
                             'edge',
                             'zoom=0.5',
                             'rotate=-30,60',
                             'denoise',
                             'brightness=0',
                             'contrast=1.0'
                             ])
    images.split = 0.2, 12
    images.minibatch = 2
    g = images.minibatch
    if True:
        for x_batch, y_batch in g:
            # 5 classes in the CSV data -> one-hot width 5
            self.assertEqual(x_batch.shape, (2, 50, 50, 3))
            self.assertEqual(y_batch.shape, (2, 5))
            self.assertEqual(x_batch.dtype, np.float32)
            break
def test_097(self):
    """ Images - stratify - augmentation - uint8

    CSV-driven stratified generator (5 classes x 2 each = batch of 10);
    only the first batch is checked, then the loop exits.
    """
    images = Images('foo', 'files/fp_urls.csv',
                    config=['uint8',
                            'resize=(50,50)',
                            'label_col=1',
                            'image_col=0',
                            'header'],
                    augment=['flip=both',
                             'edge',
                             'zoom=0.5',
                             'rotate=-30,60',
                             'denoise',
                             'brightness=0',
                             'contrast=1.0'
                             ])
    images.stratify = 5, 0.2, 12
    g = images.stratify
    if True:
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (10, 50, 50, 3))
            self.assertEqual(y_batch.shape, (10, 5))
            self.assertEqual(x_batch.dtype, np.float32)
            break
def test_098(self):
    """ Images - augmentation - minibatch - float32

    Same as test_096 but stored as float32; batch dtype must be float32.
    """
    images = Images('foo', 'files/fp_urls.csv',
                    config=['float32',
                            'resize=(50,50)',
                            'label_col=1',
                            'image_col=0',
                            'header'],
                    augment=['flip=both',
                             'edge',
                             'zoom=0.5',
                             'rotate=-30,60',
                             'denoise',
                             'brightness=0',
                             'contrast=1.0'
                             ])
    images.split = 0.2, 12
    images.minibatch = 2
    g = images.minibatch
    if True:
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (2, 50, 50, 3))
            self.assertEqual(y_batch.shape, (2, 5))
            self.assertEqual(x_batch.dtype, np.float32)
            break
def test_099(self):
    """ Images - stratify - augmentation - float32

    Same as test_097 but stored as float32; batch dtype must be float32.
    """
    images = Images('foo', 'files/fp_urls.csv',
                    config=['float32',
                            'resize=(50,50)',
                            'label_col=1',
                            'image_col=0',
                            'header'],
                    augment=['flip=both',
                             'edge',
                             'zoom=0.5',
                             'rotate=-30,60',
                             'denoise',
                             'brightness=0',
                             'contrast=1.0'
                             ])
    images.stratify = 5, 0.2, 12
    # First batch
    g = images.stratify
    if True:
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (10, 50, 50, 3))
            self.assertEqual(y_batch.shape, (10, 5))
            self.assertEqual(x_batch.dtype, np.float32)
            break
def test_100(self):
    """ Images - augmentation - minibatch - float16

    Same as test_098 but stored as float16; here the batch dtype is
    expected to remain float16 (unlike the uint8/uint16 cases).
    """
    images = Images('foo', 'files/fp_urls.csv',
                    config=['float16',
                            'resize=(50,50)',
                            'label_col=1',
                            'image_col=0',
                            'header'],
                    augment=['flip=both',
                             'edge',
                             'zoom=0.5',
                             'rotate=-30,60',
                             'denoise',
                             'brightness=0',
                             'contrast=1.0'
                             ])
    images.split = 0.2, 12
    images.minibatch = 2
    g = images.minibatch
    if True:
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (2, 50, 50, 3))
            self.assertEqual(y_batch.shape, (2, 5))
            self.assertEqual(x_batch.dtype, np.float16)
            break
def test_101(self):
    """ Images - stratify - augmentation - float16

    Same as test_099 but stored as float16; batch dtype must be float16.
    """
    images = Images('foo', 'files/fp_urls.csv',
                    config=['float16',
                            'resize=(50,50)',
                            'label_col=1',
                            'image_col=0',
                            'header'],
                    augment=['flip=both',
                             'edge',
                             'zoom=0.5',
                             'rotate=-30,60',
                             'denoise',
                             'brightness=0',
                             'contrast=1.0'
                             ])
    images.stratify = 5, 0.2, 12
    # First batch
    g = images.stratify
    if True:
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (10, 50, 50, 3))
            self.assertEqual(y_batch.shape, (10, 5))
            self.assertEqual(x_batch.dtype, np.float16)
            break
def test_102(self):
    """ Images - memory - load/stream

    Curates in-memory numpy images with 'store', then re-opens the
    persisted collection with config=['stream'] via load(); streamed
    collections keep labels but hold no in-memory image data
    (len(images.images) == 0).
    """
    # one class
    a = cv2.imread('files/1.jpg', cv2.IMREAD_COLOR)
    c = np.asarray([a, a, a, a, a])
    images = Images('foo', c, 0, config=['resize=(30,50)', 'store'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 5)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 1)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'0': 0})
    self.assertEqual(images.images[0][0].shape, (30, 50, 3))
    self.assertEqual(images.labels[0][0], 0)
    # one class, load
    images = Images(config=['stream'])
    images.load('foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 5)
    self.assertEqual(images.shape, (30, 50))
    # streaming: image data not resident in memory
    self.assertEqual(len(images.images), 0)
    self.assertEqual(len(images.labels), 1)
    self.assertEqual(images.classes, {'0': 0})
    self.assertEqual(images.labels[0][0], 0)
    # temp: close the private HDF5 handle so the file can be reused/removed
    images._hf.close()
    # multi-class
    images = Images('foo', c, [0, 1, 0, 1, 1], config=['resize=(30,50)', 'store'])
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 5)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 2)
    self.assertEqual(len(images.labels), 2)
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    self.assertEqual(images.images[0][0].shape, (30, 50, 3))
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.labels[1][0], 1)
    # multi class, load
    images = Images(config=['stream'])
    images.load('foo')
    self.assertEqual(images.fail, 0)
    self.assertEqual(images.errors, [])
    self.assertEqual(images.count, 5)
    self.assertEqual(images.shape, (30, 50))
    self.assertEqual(len(images.images), 0)
    self.assertEqual(len(images.labels), 2)
    self.assertEqual(images.classes, {'0': 0, '1': 1})
    self.assertEqual(images.labels[0][0], 0)
    self.assertEqual(images.labels[1][0], 1)
    # temp
    images._hf.close()
    os.remove('foo.h5')
def test_103(self):
    """ Images - split - stream

    Reading back the split property is not supported for streamed
    collections: the getter must raise AttributeError both right after
    curation with 'stream' and after re-loading the persisted store.
    """
    a = cv2.imread('files/1.jpg', cv2.IMREAD_COLOR)
    c = np.asarray([a, a, a, a, a])
    images = Images('foo', c, [0, 1, 0, 1, 1], config=['resize=(30,50)', 'stream'])
    images.split = 0.5
    with pytest.raises(AttributeError):
        x_train, x_test, y_train, y_test = images.split
    images = Images(config=['stream'])
    images.load('foo')
    with pytest.raises(AttributeError):
        x_train, x_test, y_train, y_test = images.split
    # temp: close private HDF5 handle before deleting the backing file
    images._hf.close()
    os.remove('foo.h5')
def test_104(self):
    """ Images - minibatch - stream

    Minibatch generation must behave identically while the streamed
    store is being written ("during store") and after it is re-opened
    with load() ("during load").
    """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1], config=['stream'])
    # during store
    images.split = 0.5, 12
    images.minibatch = 2
    if True:
        step = 0
        g = images.minibatch
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (2, 128, 128, 3))
            self.assertEqual(y_batch.shape, (2, 2))
            step += 1
            # first batch
            if step == 1:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [0, 1])
            # second batch
            elif step == 2:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [0, 1])
            # next epoch
            elif step == 3:
                self.assertEqual(list(y_batch[0]), [0, 1])
                self.assertEqual(list(y_batch[1]), [1, 0])
            else:
                break
    # temp: close private HDF5 handle before re-opening
    images._hf.close()
    # during load
    images = Images(config=['stream'])
    images.load('foo')
    images.split = 0.5, 12
    images.minibatch = 2
    if True:
        step = 0
        g = images.minibatch
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (2, 128, 128, 3))
            self.assertEqual(y_batch.shape, (2, 2))
            step += 1
            # first batch
            if step == 1:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [0, 1])
            # second batch
            elif step == 2:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [0, 1])
            # next epoch
            elif step == 3:
                self.assertEqual(list(y_batch[0]), [0, 1])
                self.assertEqual(list(y_batch[1]), [1, 0])
            else:
                break
    # temp
    images._hf.close()
    os.remove('foo.h5')
def test_105(self):
    """ Images - augmentation - minibatch - stream

    As test_104 but with flip=vertical augmentation enabled both during
    store and during load; batch label ordering must match in both modes.
    """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1], augment=['flip=vertical'], config=['stream'])
    images.split = 0.5, 12
    images.minibatch = 2
    g = images.minibatch
    if True:
        step = 0
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (2, 128, 128, 3))
            self.assertEqual(y_batch.shape, (2, 2))
            step += 1
            # first batch
            if step == 1:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
            # second batch
            elif step == 2:
                self.assertEqual(list(y_batch[0]), [0, 1])
                self.assertEqual(list(y_batch[1]), [0, 1])
            # next epoch
            elif step == 3:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
            else:
                break
    # temp: close private HDF5 handle before re-opening
    images._hf.close()
    # during load -- augmentation must be re-specified on the new instance
    images = Images(config=['stream'], augment=['flip=vertical'])
    images.load('foo')
    images.split = 0.5, 12
    images.minibatch = 2
    g = images.minibatch
    if True:
        step = 0
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (2, 128, 128, 3))
            self.assertEqual(y_batch.shape, (2, 2))
            step += 1
            # first batch
            if step == 1:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
            # second batch
            elif step == 2:
                self.assertEqual(list(y_batch[0]), [0, 1])
                self.assertEqual(list(y_batch[1]), [0, 1])
            # next epoch
            elif step == 3:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
            else:
                break
    # temp
    images._hf.close()
    os.remove('foo.h5')
def test_106(self):
    """ Images - stratify - stream

    Stratified generation over a streamed store; each batch of 2 must be
    one class-0 then one class-1 sample, both during store and after load.
    """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1], config=['stream'])
    images.stratify = 2, 0.5, 12
    g = images.stratify
    if True:
        step = 0
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (2, 128, 128, 3))
            self.assertEqual(y_batch.shape, (2, 2))
            step += 1
            # first batch
            if step == 1:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [0, 1])
            elif step == 2:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [0, 1])
            elif step == 3:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [0, 1])
            else:
                break
    # temp: close private HDF5 handle before re-opening
    images._hf.close()
    # during load
    images = Images(config=['stream'])
    images.load('foo')
    images.stratify = 2, 0.5, 12
    g = images.stratify
    if True:
        step = 0
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (2, 128, 128, 3))
            self.assertEqual(y_batch.shape, (2, 2))
            step += 1
            # first batch
            if step == 1:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [0, 1])
            elif step == 2:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [0, 1])
            elif step == 3:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [0, 1])
            else:
                break
    # temp
    images._hf.close()
    os.remove('foo.h5')
def test_107(self):
    """ Images - stratify - augmentation - stream

    As test_106 but with flip=both augmentation, which yields batches of
    4 (two per class); checked both during store and after load.
    """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg',
                     'files/8rgb.png', 'files/1.jpg', 'files/2.jpg', 'files/3.jpg'],
                    [0, 0, 0, 0, 1, 1, 1, 1], augment=['flip=both'], config=['stream'])
    images.stratify = 2, 0.5, 12
    g = images.stratify
    if True:
        step = 0
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (4, 128, 128, 3))
            self.assertEqual(y_batch.shape, (4, 2))
            step += 1
            # first batch
            if step == 1:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
                self.assertEqual(list(y_batch[2]), [0, 1])
                self.assertEqual(list(y_batch[3]), [0, 1])
            elif step == 2:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
                self.assertEqual(list(y_batch[2]), [0, 1])
                self.assertEqual(list(y_batch[3]), [0, 1])
            elif step == 3:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
                self.assertEqual(list(y_batch[2]), [0, 1])
                self.assertEqual(list(y_batch[3]), [0, 1])
            else:
                break
    # temp: close private HDF5 handle before re-opening
    images._hf.close()
    # during load -- augmentation re-specified on the new instance
    images = Images(config=['stream'], augment=['flip=both'])
    images.load('foo')
    images.stratify = 2, 0.5, 12
    g = images.stratify
    if True:
        step = 0
        for x_batch, y_batch in g:
            self.assertEqual(x_batch.shape, (4, 128, 128, 3))
            self.assertEqual(y_batch.shape, (4, 2))
            step += 1
            # first batch
            if step == 1:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
                self.assertEqual(list(y_batch[2]), [0, 1])
                self.assertEqual(list(y_batch[3]), [0, 1])
            elif step == 2:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
                self.assertEqual(list(y_batch[2]), [0, 1])
                self.assertEqual(list(y_batch[3]), [0, 1])
            elif step == 3:
                self.assertEqual(list(y_batch[0]), [1, 0])
                self.assertEqual(list(y_batch[1]), [1, 0])
                self.assertEqual(list(y_batch[2]), [0, 1])
                self.assertEqual(list(y_batch[3]), [0, 1])
            else:
                break
    # temp
    images._hf.close()
    os.remove('foo.h5')
def test_108(self):
    """ Images - next() operator - stream

    Iterating with next() over a streamed float16 collection: two samples
    per epoch (split 0.5 of 4 images), (None, None) signals end of epoch,
    then iteration resumes.  Also exercised after persisting with 'store'
    and re-loading, in both default and 'stream' load modes.
    """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'],
                    [0, 0, 1, 1], config=['float16', 'stream'])
    images.split = 0.5, 12
    # loop thru list twice
    for _ in range(2):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
        self.assertEqual(x.dtype, np.float16)
    # (None, None) marks the end of the epoch
    self.assertEqual(next(images), (None, None))
    for _ in range(2):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
        self.assertEqual(x.dtype, np.float16)
    # temp: close private HDF5 handle before re-creating the store
    images._hf.close()
    # load
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'],
                    [0, 0, 1, 1], config=['float16', 'store'])
    images = Images()
    images.load('foo')
    images.split = 0.5, 12
    # loop thru list twice
    for _ in range(2):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
        self.assertEqual(x.dtype, np.float16)
    self.assertEqual(next(images), (None, None))
    for _ in range(2):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
        self.assertEqual(x.dtype, np.float16)
    # temp
    images._hf.close()
    # during load
    images = Images(config=['stream'])
    images.load('foo')
    images.split = 0.5, 12
    # loop thru list twice
    for _ in range(2):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
        self.assertEqual(x.dtype, np.float16)
    self.assertEqual(next(images), (None, None))
    for _ in range(2):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
        self.assertEqual(x.dtype, np.float16)
    # temp
    images._hf.close()
    # load
    # NOTE(review): this section is an exact repeat of the earlier 'store'
    # + load() section above -- possibly a copy-paste duplicate; confirm
    # whether it was meant to vary (e.g. a different config).
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'],
                    [0, 0, 1, 1], config=['float16', 'store'])
    images = Images()
    images.load('foo')
    images.split = 0.5, 12
    # loop thru list twice
    for _ in range(2):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
        self.assertEqual(x.dtype, np.float16)
    self.assertEqual(next(images), (None, None))
    for _ in range(2):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
        self.assertEqual(x.dtype, np.float16)
    # temp
    images._hf.close()
    os.remove('foo.h5')
def test_109(self):
    """ Images - next() operator with augmentation - stream

    With flip=horizontal augmentation, each epoch over the streamed
    collection yields 4 samples before the (None, None) end marker;
    verified both during store and after load().
    """
    images = Images('foo',
                    ['files/1.jpg', 'files/2.jpg', 'files/3.jpg', 'files/8rgb.jpg'],
                    [0, 0, 1, 1], augment=['flip=horizontal'], config=['stream'])
    images.split = 0.5, 12
    # loop thru list twice
    for _ in range(4):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
    self.assertEqual(next(images), (None, None))
    # next epoch
    for _ in range(4):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
    self.assertEqual(next(images), (None, None))
    # temp: close private HDF5 handle before re-opening
    images._hf.close()
    # during load -- augmentation re-specified on the new instance
    images = Images(config=['stream'], augment=['flip=horizontal'])
    images.load('foo')
    images.split = 0.5, 12
    # loop thru list twice
    for _ in range(4):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
    self.assertEqual(next(images), (None, None))
    # next epoch
    for _ in range(4):
        x, y = next(images)
        self.assertEqual(x.shape, (128, 128, 3))
        self.assertEqual(y.shape, (2,))
    self.assertEqual(next(images), (None, None))
    # temp
    images._hf.close()
    os.remove('foo.h5')
def test_110(self):
    """ data type when streaming

    For each supported dtype config, the first pixel value of a stored
    collection must equal the same pixel after a stream-curate + load()
    round trip.  The 'stream' instance is discarded immediately; its
    on-disk foo.h5 is what load() reads back.
    """
    images = Images('foo', ['files/1.jpg'], 0, config=['store', 'uint8'])
    # _data[class][image][row][col][channel] -- first pixel, first channel
    x1 = images._data[0][0][0][0][0]
    images = Images('foo', ['files/1.jpg'], 0, config=['stream', 'uint8'])
    images = Images()
    images.load('foo')
    x2 = images._data[0][0][0][0][0]
    self.assertEqual(x1, x2)
    images = Images('foo', ['files/1.jpg'], 0, config=['store', 'uint16'])
    x1 = images._data[0][0][0][0][0]
    images = Images('foo', ['files/1.jpg'], 0, config=['stream', 'uint16'])
    images = Images()
    images.load('foo')
    x2 = images._data[0][0][0][0][0]
    self.assertEqual(x1, x2)
    images = Images('foo', ['files/1.jpg'], 0, config=['store', 'float16'])
    x1 = images._data[0][0][0][0][0]
    images = Images('foo', ['files/1.jpg'], 0, config=['stream', 'float16'])
    images = Images()
    images.load('foo')
    x2 = images._data[0][0][0][0][0]
    self.assertEqual(x1, x2)
    images = Images('foo', ['files/1.jpg'], 0, config=['store', 'float32'])
    x1 = images._data[0][0][0][0][0]
    images = Images('foo', ['files/1.jpg'], 0, config=['stream', 'float32'])
    images = Images()
    images.load('foo')
    x2 = images._data[0][0][0][0][0]
    self.assertEqual(x1, x2)
    images = Images('foo', ['files/1.jpg'], 0, config=['store', 'float64'])
    x1 = images._data[0][0][0][0][0]
    images = Images('foo', ['files/1.jpg'], 0, config=['stream', 'float64'])
    images = Images()
    images.load('foo')
    x2 = images._data[0][0][0][0][0]
    self.assertEqual(x1, x2)
    os.remove('foo.h5')
def test_111(self):
    ''' memory labels - empty

    An empty label sequence (Python list or numpy array) must be rejected
    with AttributeError when curating from in-memory images.
    '''
    a = cv2.imread('files/1.jpg', cv2.IMREAD_GRAYSCALE)
    # list
    i = np.asarray([a])
    with pytest.raises(AttributeError):
        images = Images('foo', i, [], config=['resize=(50,50)'])
    # numpy array
    l = np.asarray([])
    with pytest.raises(AttributeError):
        images = Images('foo', i, l, config=['resize=(50,50)'])
def test_112(self):
    ''' memory numpy - different int types for labels

    Curating a single in-memory grayscale image must accept label arrays
    of every common numpy integer dtype and produce identical results.
    '''
    # The image is identical for every sub-case; only the label dtype varies.
    a = cv2.imread('files/1.jpg', cv2.IMREAD_GRAYSCALE)
    i = np.asarray([a])

    def _verify(label_dtype):
        # Curate one image with a label array of the given dtype and check
        # the standard single-image, single-class invariants.
        l = np.asarray([0]).astype(label_dtype)
        images = Images('foo', i, l, config=['resize=(50,50)'])
        self.assertEqual(images.fail, 0)
        self.assertEqual(images.errors, [])
        self.assertEqual(images.count, 1)
        self.assertEqual(images.shape, (50, 50))
        self.assertEqual(len(images.images), 1)
        self.assertEqual(len(images.labels), 1)
        self.assertEqual(images.classes, {0: 0})
        self.assertEqual(images.labels[0][0], 0)

    # BUG FIX: the original '# int16' sub-case used np.uint16 (copy-paste
    # error), so np.int16 labels were never actually exercised.
    for dtype in (np.uint8, np.uint16, np.uint32,
                  np.int8, np.int16, np.int32):
        _verify(dtype)
def test_113(self):
''' resize= None '''
pass
| 44.084287 | 101 | 0.528909 | 33,833 | 285,049 | 4.428753 | 0.016641 | 0.309735 | 0.193549 | 0.207584 | 0.950894 | 0.94141 | 0.923718 | 0.909262 | 0.894446 | 0.878749 | 0 | 0.054892 | 0.310347 | 285,049 | 6,465 | 102 | 44.091106 | 0.707312 | 0.039769 | 0 | 0.885443 | 0 | 0.002371 | 0.078074 | 0.000898 | 0 | 0 | 0 | 0.000155 | 0.572236 | 1 | 0.021525 | false | 0.00073 | 0.001642 | 0 | 0.023714 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
7b65fd3c3664deca059a74cf087d0fca50ac27da | 276,731 | py | Python | sdk/python/pulumi_sakuracloud/_inputs.py | sacloud/pulumi-sakuracloud | 3eff14c6ec8ef4ad6422e0cdf15585df67eb4d6e | [
"ECL-2.0",
"Apache-2.0"
] | 6 | 2019-12-07T07:46:05.000Z | 2020-12-19T02:41:42.000Z | sdk/python/pulumi_sakuracloud/_inputs.py | sacloud/pulumi-sakuracloud | 3eff14c6ec8ef4ad6422e0cdf15585df67eb4d6e | [
"ECL-2.0",
"Apache-2.0"
] | 5 | 2019-09-11T04:41:06.000Z | 2021-10-19T07:50:34.000Z | sdk/python/pulumi_sakuracloud/_inputs.py | sacloud/pulumi-sakuracloud | 3eff14c6ec8ef4ad6422e0cdf15585df67eb4d6e | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2019-09-08T05:38:16.000Z | 2021-06-24T01:32:47.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = [
'CertificateAuthorityClientArgs',
'CertificateAuthorityClientSubjectArgs',
'CertificateAuthorityServerArgs',
'CertificateAuthorityServerSubjectArgs',
'CertificateAuthoritySubjectArgs',
'ContainerRegistryUserArgs',
'DNSRecordArgs',
'DatabaseBackupArgs',
'DatabaseNetworkInterfaceArgs',
'DatabaseReadReplicaNetworkInterfaceArgs',
'GSLBHealthCheckArgs',
'GSLBServerArgs',
'LoadBalancerNetworkInterfaceArgs',
'LoadBalancerVipArgs',
'LoadBalancerVipServerArgs',
'LocalRouterNetworkInterfaceArgs',
'LocalRouterPeerArgs',
'LocalRouterStaticRouteArgs',
'LocalRouterSwitchArgs',
'MobileGatewayPrivateNetworkInterfaceArgs',
'MobileGatewaySimArgs',
'MobileGatewaySimRouteArgs',
'MobileGatewayStaticRouteArgs',
'MobileGatewayTrafficControlArgs',
'NFSNetworkInterfaceArgs',
'PacketFilterExpressionArgs',
'PacketFilterRuleExpressionArgs',
'ProxyLBACMECertificateArgs',
'ProxyLBACMECertificateAdditionalCertificateArgs',
'ProxyLBBindPortArgs',
'ProxyLBBindPortResponseHeaderArgs',
'ProxyLBCertificateArgs',
'ProxyLBCertificateAdditionalCertificateArgs',
'ProxyLBHealthCheckArgs',
'ProxyLBRuleArgs',
'ProxyLBServerArgs',
'ProxyLBSorryServerArgs',
'ProxyLBSyslogArgs',
'ServerDiskEditParameterArgs',
'ServerDiskEditParameterNoteArgs',
'ServerNetworkInterfaceArgs',
'SimpleMonitorHealthCheckArgs',
'VPCRouterDhcpServerArgs',
'VPCRouterDhcpStaticMappingArgs',
'VPCRouterFirewallArgs',
'VPCRouterFirewallExpressionArgs',
'VPCRouterL2tpArgs',
'VPCRouterPortForwardingArgs',
'VPCRouterPptpArgs',
'VPCRouterPrivateNetworkInterfaceArgs',
'VPCRouterPublicNetworkInterfaceArgs',
'VPCRouterSiteToSiteVpnArgs',
'VPCRouterStaticNatArgs',
'VPCRouterStaticRouteArgs',
'VPCRouterUserArgs',
'VPCRouterWireGuardArgs',
'VPCRouterWireGuardPeerArgs',
'GetArchiveFilterArgs',
'GetArchiveFilterConditionArgs',
'GetBridgeFilterArgs',
'GetBridgeFilterConditionArgs',
'GetCDROMFilterArgs',
'GetCDROMFilterConditionArgs',
'GetCertificateAuthorityFilterArgs',
'GetCertificateAuthorityFilterConditionArgs',
'GetContainerRegistryFilterArgs',
'GetContainerRegistryFilterConditionArgs',
'GetDNSFilterArgs',
'GetDNSFilterConditionArgs',
'GetDatabaseFilterArgs',
'GetDatabaseFilterConditionArgs',
'GetDiskFilterArgs',
'GetDiskFilterConditionArgs',
'GetESMEFilterArgs',
'GetESMEFilterConditionArgs',
'GetEnhancedDBFilterArgs',
'GetEnhancedDBFilterConditionArgs',
'GetGSLBFilterArgs',
'GetGSLBFilterConditionArgs',
'GetIconFilterArgs',
'GetIconFilterConditionArgs',
'GetInternetFilterArgs',
'GetInternetFilterConditionArgs',
'GetLoadBalancerFilterArgs',
'GetLoadBalancerFilterConditionArgs',
'GetLocalRouterFilterArgs',
'GetLocalRouterFilterConditionArgs',
'GetNFSFilterArgs',
'GetNFSFilterConditionArgs',
'GetNoteFilterArgs',
'GetNoteFilterConditionArgs',
'GetPacketFilterFilterArgs',
'GetPacketFilterFilterConditionArgs',
'GetPrivateHostFilterArgs',
'GetPrivateHostFilterConditionArgs',
'GetProxyLBFilterArgs',
'GetProxyLBFilterConditionArgs',
'GetSSHKeyFilterArgs',
'GetSSHKeyFilterConditionArgs',
'GetServerFilterArgs',
'GetServerFilterConditionArgs',
'GetSimpleMonitorFilterArgs',
'GetSimpleMonitorFilterConditionArgs',
'GetSwitchFilterArgs',
'GetSwitchFilterConditionArgs',
'GetVPCRouterFilterArgs',
'GetVPCRouterFilterConditionArgs',
]
@pulumi.input_type
class CertificateAuthorityClientArgs:
def __init__(__self__, *,
subject: pulumi.Input['CertificateAuthorityClientSubjectArgs'],
validity_period_hours: pulumi.Input[int],
certificate: Optional[pulumi.Input[str]] = None,
csr: Optional[pulumi.Input[str]] = None,
email: Optional[pulumi.Input[str]] = None,
hold: Optional[pulumi.Input[bool]] = None,
id: Optional[pulumi.Input[str]] = None,
issue_state: Optional[pulumi.Input[str]] = None,
not_after: Optional[pulumi.Input[str]] = None,
not_before: Optional[pulumi.Input[str]] = None,
public_key: Optional[pulumi.Input[str]] = None,
serial_number: Optional[pulumi.Input[str]] = None,
url: Optional[pulumi.Input[str]] = None):
"""
:param pulumi.Input['CertificateAuthorityClientSubjectArgs'] subject: A `subject` block as defined below.
:param pulumi.Input[int] validity_period_hours: The number of hours after initial issuing that the certificate will become invalid.
:param pulumi.Input[str] certificate: The body of the CA's certificate in PEM format.
:param pulumi.Input[str] csr: Input for issuing a certificate.
:param pulumi.Input[str] email: Input for issuing a certificate.
:param pulumi.Input[bool] hold: Flag to suspend/hold the certificate.
:param pulumi.Input[str] id: The id of the certificate.
:param pulumi.Input[str] issue_state: Current state of the certificate.
:param pulumi.Input[str] not_after: The date on which the certificate validity period ends, in RFC3339 format.
:param pulumi.Input[str] not_before: The date on which the certificate validity period begins, in RFC3339 format.
:param pulumi.Input[str] public_key: Input for issuing a certificate.
:param pulumi.Input[str] serial_number: The body of the CA's certificate in PEM format.
:param pulumi.Input[str] url: The URL for issuing the certificate.
"""
pulumi.set(__self__, "subject", subject)
pulumi.set(__self__, "validity_period_hours", validity_period_hours)
if certificate is not None:
pulumi.set(__self__, "certificate", certificate)
if csr is not None:
pulumi.set(__self__, "csr", csr)
if email is not None:
pulumi.set(__self__, "email", email)
if hold is not None:
pulumi.set(__self__, "hold", hold)
if id is not None:
pulumi.set(__self__, "id", id)
if issue_state is not None:
pulumi.set(__self__, "issue_state", issue_state)
if not_after is not None:
pulumi.set(__self__, "not_after", not_after)
if not_before is not None:
pulumi.set(__self__, "not_before", not_before)
if public_key is not None:
pulumi.set(__self__, "public_key", public_key)
if serial_number is not None:
pulumi.set(__self__, "serial_number", serial_number)
if url is not None:
pulumi.set(__self__, "url", url)
@property
@pulumi.getter
def subject(self) -> pulumi.Input['CertificateAuthorityClientSubjectArgs']:
"""
A `subject` block as defined below.
"""
return pulumi.get(self, "subject")
@subject.setter
def subject(self, value: pulumi.Input['CertificateAuthorityClientSubjectArgs']):
pulumi.set(self, "subject", value)
@property
@pulumi.getter(name="validityPeriodHours")
def validity_period_hours(self) -> pulumi.Input[int]:
"""
The number of hours after initial issuing that the certificate will become invalid.
"""
return pulumi.get(self, "validity_period_hours")
@validity_period_hours.setter
def validity_period_hours(self, value: pulumi.Input[int]):
pulumi.set(self, "validity_period_hours", value)
@property
@pulumi.getter
def certificate(self) -> Optional[pulumi.Input[str]]:
"""
The body of the CA's certificate in PEM format.
"""
return pulumi.get(self, "certificate")
@certificate.setter
def certificate(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "certificate", value)
@property
@pulumi.getter
def csr(self) -> Optional[pulumi.Input[str]]:
    """
    The certificate signing request (CSR) used as input for issuing the certificate.
    """
    return pulumi.get(self, "csr")

@csr.setter
def csr(self, value: Optional[pulumi.Input[str]]):
    """Set the certificate signing request (CSR)."""
    pulumi.set(self, "csr", value)
@property
@pulumi.getter
def email(self) -> Optional[pulumi.Input[str]]:
    """
    The email address used as input for issuing the certificate.
    """
    return pulumi.get(self, "email")

@email.setter
def email(self, value: Optional[pulumi.Input[str]]):
    """Set the email address used for issuing the certificate."""
    pulumi.set(self, "email", value)
@property
@pulumi.getter
def hold(self) -> Optional[pulumi.Input[bool]]:
    """
    Flag to suspend/hold the certificate.
    """
    return pulumi.get(self, "hold")

@hold.setter
def hold(self, value: Optional[pulumi.Input[bool]]):
    """Set the suspend/hold flag."""
    pulumi.set(self, "hold", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
    """
    The id of the certificate.
    """
    return pulumi.get(self, "id")

@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
    """Set the id of the certificate."""
    pulumi.set(self, "id", value)
@property
@pulumi.getter(name="issueState")
def issue_state(self) -> Optional[pulumi.Input[str]]:
    """
    Current state of the certificate.
    """
    return pulumi.get(self, "issue_state")

@issue_state.setter
def issue_state(self, value: Optional[pulumi.Input[str]]):
    """Set the current state of the certificate."""
    pulumi.set(self, "issue_state", value)
@property
@pulumi.getter(name="notAfter")
def not_after(self) -> Optional[pulumi.Input[str]]:
    """
    The date on which the certificate validity period ends, in RFC3339 format.
    """
    return pulumi.get(self, "not_after")

@not_after.setter
def not_after(self, value: Optional[pulumi.Input[str]]):
    """Set the end of the validity period (RFC3339)."""
    pulumi.set(self, "not_after", value)
@property
@pulumi.getter(name="notBefore")
def not_before(self) -> Optional[pulumi.Input[str]]:
    """
    The date on which the certificate validity period begins, in RFC3339 format.
    """
    return pulumi.get(self, "not_before")

@not_before.setter
def not_before(self, value: Optional[pulumi.Input[str]]):
    """Set the start of the validity period (RFC3339)."""
    pulumi.set(self, "not_before", value)
@property
@pulumi.getter(name="publicKey")
def public_key(self) -> Optional[pulumi.Input[str]]:
    """
    The public key used as input for issuing the certificate.
    """
    return pulumi.get(self, "public_key")

@public_key.setter
def public_key(self, value: Optional[pulumi.Input[str]]):
    """Set the public key used for issuing the certificate."""
    pulumi.set(self, "public_key", value)
@property
@pulumi.getter(name="serialNumber")
def serial_number(self) -> Optional[pulumi.Input[str]]:
    """
    The serial number of the certificate.
    """
    return pulumi.get(self, "serial_number")

@serial_number.setter
def serial_number(self, value: Optional[pulumi.Input[str]]):
    """Set the serial number of the certificate."""
    pulumi.set(self, "serial_number", value)
@property
@pulumi.getter
def url(self) -> Optional[pulumi.Input[str]]:
    """
    The URL for issuing the certificate.
    """
    return pulumi.get(self, "url")

@url.setter
def url(self, value: Optional[pulumi.Input[str]]):
    """Set the URL for issuing the certificate."""
    pulumi.set(self, "url", value)
@pulumi.input_type
class CertificateAuthorityClientSubjectArgs:
    """The subject (distinguished name) settings of a client certificate."""

    def __init__(__self__, *,
                 common_name: pulumi.Input[str],
                 country: pulumi.Input[str],
                 organization: pulumi.Input[str],
                 organization_units: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] common_name: The common name (CN) of the certificate subject.
        :param pulumi.Input[str] country: The country (C) of the certificate subject.
        :param pulumi.Input[str] organization: The organization (O) of the certificate subject.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] organization_units: A list of organization units (OU) of the certificate subject.
        """
        pulumi.set(__self__, "common_name", common_name)
        pulumi.set(__self__, "country", country)
        pulumi.set(__self__, "organization", organization)
        if organization_units is not None:
            pulumi.set(__self__, "organization_units", organization_units)

    @property
    @pulumi.getter(name="commonName")
    def common_name(self) -> pulumi.Input[str]:
        """
        The common name (CN) of the certificate subject.
        """
        return pulumi.get(self, "common_name")

    @common_name.setter
    def common_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "common_name", value)

    @property
    @pulumi.getter
    def country(self) -> pulumi.Input[str]:
        """
        The country (C) of the certificate subject.
        """
        return pulumi.get(self, "country")

    @country.setter
    def country(self, value: pulumi.Input[str]):
        pulumi.set(self, "country", value)

    @property
    @pulumi.getter
    def organization(self) -> pulumi.Input[str]:
        """
        The organization (O) of the certificate subject.
        """
        return pulumi.get(self, "organization")

    @organization.setter
    def organization(self, value: pulumi.Input[str]):
        pulumi.set(self, "organization", value)

    @property
    @pulumi.getter(name="organizationUnits")
    def organization_units(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of organization units (OU) of the certificate subject.
        """
        return pulumi.get(self, "organization_units")

    @organization_units.setter
    def organization_units(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "organization_units", value)
@pulumi.input_type
class CertificateAuthorityServerArgs:
    """Arguments describing a server certificate issued by a CertificateAuthority."""

    def __init__(__self__, *,
                 subject: pulumi.Input['CertificateAuthorityServerSubjectArgs'],
                 validity_period_hours: pulumi.Input[int],
                 certificate: Optional[pulumi.Input[str]] = None,
                 csr: Optional[pulumi.Input[str]] = None,
                 hold: Optional[pulumi.Input[bool]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 issue_state: Optional[pulumi.Input[str]] = None,
                 not_after: Optional[pulumi.Input[str]] = None,
                 not_before: Optional[pulumi.Input[str]] = None,
                 public_key: Optional[pulumi.Input[str]] = None,
                 serial_number: Optional[pulumi.Input[str]] = None,
                 subject_alternative_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input['CertificateAuthorityServerSubjectArgs'] subject: A `subject` block as defined below.
        :param pulumi.Input[int] validity_period_hours: The number of hours after initial issuing that the certificate will become invalid.
        :param pulumi.Input[str] certificate: The body of the CA's certificate in PEM format.
        :param pulumi.Input[str] csr: The certificate signing request (CSR) used as input for issuing the certificate.
        :param pulumi.Input[bool] hold: Flag to suspend/hold the certificate.
        :param pulumi.Input[str] id: The id of the certificate.
        :param pulumi.Input[str] issue_state: Current state of the certificate.
        :param pulumi.Input[str] not_after: The date on which the certificate validity period ends, in RFC3339 format.
        :param pulumi.Input[str] not_before: The date on which the certificate validity period begins, in RFC3339 format.
        :param pulumi.Input[str] public_key: The public key used as input for issuing the certificate.
        :param pulumi.Input[str] serial_number: The serial number of the certificate.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] subject_alternative_names: A list of subject alternative names (SANs) to set to the certificate.
        """
        pulumi.set(__self__, "subject", subject)
        pulumi.set(__self__, "validity_period_hours", validity_period_hours)
        if certificate is not None:
            pulumi.set(__self__, "certificate", certificate)
        if csr is not None:
            pulumi.set(__self__, "csr", csr)
        if hold is not None:
            pulumi.set(__self__, "hold", hold)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if issue_state is not None:
            pulumi.set(__self__, "issue_state", issue_state)
        if not_after is not None:
            pulumi.set(__self__, "not_after", not_after)
        if not_before is not None:
            pulumi.set(__self__, "not_before", not_before)
        if public_key is not None:
            pulumi.set(__self__, "public_key", public_key)
        if serial_number is not None:
            pulumi.set(__self__, "serial_number", serial_number)
        if subject_alternative_names is not None:
            pulumi.set(__self__, "subject_alternative_names", subject_alternative_names)

    @property
    @pulumi.getter
    def subject(self) -> pulumi.Input['CertificateAuthorityServerSubjectArgs']:
        """
        A `subject` block as defined below.
        """
        return pulumi.get(self, "subject")

    @subject.setter
    def subject(self, value: pulumi.Input['CertificateAuthorityServerSubjectArgs']):
        pulumi.set(self, "subject", value)

    @property
    @pulumi.getter(name="validityPeriodHours")
    def validity_period_hours(self) -> pulumi.Input[int]:
        """
        The number of hours after initial issuing that the certificate will become invalid.
        """
        return pulumi.get(self, "validity_period_hours")

    @validity_period_hours.setter
    def validity_period_hours(self, value: pulumi.Input[int]):
        pulumi.set(self, "validity_period_hours", value)

    @property
    @pulumi.getter
    def certificate(self) -> Optional[pulumi.Input[str]]:
        """
        The body of the CA's certificate in PEM format.
        """
        return pulumi.get(self, "certificate")

    @certificate.setter
    def certificate(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "certificate", value)

    @property
    @pulumi.getter
    def csr(self) -> Optional[pulumi.Input[str]]:
        """
        The certificate signing request (CSR) used as input for issuing the certificate.
        """
        return pulumi.get(self, "csr")

    @csr.setter
    def csr(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "csr", value)

    @property
    @pulumi.getter
    def hold(self) -> Optional[pulumi.Input[bool]]:
        """
        Flag to suspend/hold the certificate.
        """
        return pulumi.get(self, "hold")

    @hold.setter
    def hold(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "hold", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        The id of the certificate.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter(name="issueState")
    def issue_state(self) -> Optional[pulumi.Input[str]]:
        """
        Current state of the certificate.
        """
        return pulumi.get(self, "issue_state")

    @issue_state.setter
    def issue_state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "issue_state", value)

    @property
    @pulumi.getter(name="notAfter")
    def not_after(self) -> Optional[pulumi.Input[str]]:
        """
        The date on which the certificate validity period ends, in RFC3339 format.
        """
        return pulumi.get(self, "not_after")

    @not_after.setter
    def not_after(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "not_after", value)

    @property
    @pulumi.getter(name="notBefore")
    def not_before(self) -> Optional[pulumi.Input[str]]:
        """
        The date on which the certificate validity period begins, in RFC3339 format.
        """
        return pulumi.get(self, "not_before")

    @not_before.setter
    def not_before(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "not_before", value)

    @property
    @pulumi.getter(name="publicKey")
    def public_key(self) -> Optional[pulumi.Input[str]]:
        """
        The public key used as input for issuing the certificate.
        """
        return pulumi.get(self, "public_key")

    @public_key.setter
    def public_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "public_key", value)

    @property
    @pulumi.getter(name="serialNumber")
    def serial_number(self) -> Optional[pulumi.Input[str]]:
        """
        The serial number of the certificate.
        """
        return pulumi.get(self, "serial_number")

    @serial_number.setter
    def serial_number(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "serial_number", value)

    @property
    @pulumi.getter(name="subjectAlternativeNames")
    def subject_alternative_names(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of subject alternative names (SANs) to set to the certificate.
        """
        return pulumi.get(self, "subject_alternative_names")

    @subject_alternative_names.setter
    def subject_alternative_names(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "subject_alternative_names", value)
@pulumi.input_type
class CertificateAuthorityServerSubjectArgs:
    """The subject (distinguished name) settings of a server certificate."""

    def __init__(__self__, *,
                 common_name: pulumi.Input[str],
                 country: pulumi.Input[str],
                 organization: pulumi.Input[str],
                 organization_units: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] common_name: The common name (CN) of the certificate subject.
        :param pulumi.Input[str] country: The country (C) of the certificate subject.
        :param pulumi.Input[str] organization: The organization (O) of the certificate subject.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] organization_units: A list of organization units (OU) of the certificate subject.
        """
        pulumi.set(__self__, "common_name", common_name)
        pulumi.set(__self__, "country", country)
        pulumi.set(__self__, "organization", organization)
        if organization_units is not None:
            pulumi.set(__self__, "organization_units", organization_units)

    @property
    @pulumi.getter(name="commonName")
    def common_name(self) -> pulumi.Input[str]:
        """
        The common name (CN) of the certificate subject.
        """
        return pulumi.get(self, "common_name")

    @common_name.setter
    def common_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "common_name", value)

    @property
    @pulumi.getter
    def country(self) -> pulumi.Input[str]:
        """
        The country (C) of the certificate subject.
        """
        return pulumi.get(self, "country")

    @country.setter
    def country(self, value: pulumi.Input[str]):
        pulumi.set(self, "country", value)

    @property
    @pulumi.getter
    def organization(self) -> pulumi.Input[str]:
        """
        The organization (O) of the certificate subject.
        """
        return pulumi.get(self, "organization")

    @organization.setter
    def organization(self, value: pulumi.Input[str]):
        pulumi.set(self, "organization", value)

    @property
    @pulumi.getter(name="organizationUnits")
    def organization_units(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of organization units (OU) of the certificate subject.
        """
        return pulumi.get(self, "organization_units")

    @organization_units.setter
    def organization_units(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "organization_units", value)
@pulumi.input_type
class CertificateAuthoritySubjectArgs:
    """The subject (distinguished name) settings of the certificate authority itself."""

    def __init__(__self__, *,
                 common_name: pulumi.Input[str],
                 country: pulumi.Input[str],
                 organization: pulumi.Input[str],
                 organization_units: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] common_name: The common name (CN) of the certificate subject.
        :param pulumi.Input[str] country: The country (C) of the certificate subject.
        :param pulumi.Input[str] organization: The organization (O) of the certificate subject.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] organization_units: A list of organization units (OU) of the certificate subject.
        """
        pulumi.set(__self__, "common_name", common_name)
        pulumi.set(__self__, "country", country)
        pulumi.set(__self__, "organization", organization)
        if organization_units is not None:
            pulumi.set(__self__, "organization_units", organization_units)

    @property
    @pulumi.getter(name="commonName")
    def common_name(self) -> pulumi.Input[str]:
        """
        The common name (CN) of the certificate subject.
        """
        return pulumi.get(self, "common_name")

    @common_name.setter
    def common_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "common_name", value)

    @property
    @pulumi.getter
    def country(self) -> pulumi.Input[str]:
        """
        The country (C) of the certificate subject.
        """
        return pulumi.get(self, "country")

    @country.setter
    def country(self, value: pulumi.Input[str]):
        pulumi.set(self, "country", value)

    @property
    @pulumi.getter
    def organization(self) -> pulumi.Input[str]:
        """
        The organization (O) of the certificate subject.
        """
        return pulumi.get(self, "organization")

    @organization.setter
    def organization(self, value: pulumi.Input[str]):
        pulumi.set(self, "organization", value)

    @property
    @pulumi.getter(name="organizationUnits")
    def organization_units(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of organization units (OU) of the certificate subject.
        """
        return pulumi.get(self, "organization_units")

    @organization_units.setter
    def organization_units(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "organization_units", value)
@pulumi.input_type
class ContainerRegistryUserArgs:
    """Credentials and access level for a ContainerRegistry user."""

    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 password: pulumi.Input[str],
                 permission: pulumi.Input[str]):
        """
        :param pulumi.Input[str] name: The user name used to authenticate remote access.
        :param pulumi.Input[str] password: The password used to authenticate remote access.
        :param pulumi.Input[str] permission: The level of access allowed to the user. This must be one of [`all`/`readwrite`/`readonly`].
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "password", password)
        pulumi.set(__self__, "permission", permission)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The user name used to authenticate remote access.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def password(self) -> pulumi.Input[str]:
        """
        The password used to authenticate remote access.
        """
        return pulumi.get(self, "password")

    @password.setter
    def password(self, value: pulumi.Input[str]):
        pulumi.set(self, "password", value)

    @property
    @pulumi.getter
    def permission(self) -> pulumi.Input[str]:
        """
        The level of access allowed to the user. This must be one of [`all`/`readwrite`/`readonly`].
        """
        return pulumi.get(self, "permission")

    @permission.setter
    def permission(self, value: pulumi.Input[str]):
        pulumi.set(self, "permission", value)
@pulumi.input_type
class DNSRecordArgs:
    """A single record belonging to a DNS zone."""

    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 type: pulumi.Input[str],
                 value: pulumi.Input[str],
                 port: Optional[pulumi.Input[int]] = None,
                 priority: Optional[pulumi.Input[int]] = None,
                 ttl: Optional[pulumi.Input[int]] = None,
                 weight: Optional[pulumi.Input[int]] = None):
        """
        :param pulumi.Input[str] name: The name of the DNS Record. The length of this value must be in the range [`1`-`64`].
        :param pulumi.Input[str] type: The type of DNS Record. This must be one of [`A`/`AAAA`/`ALIAS`/`CNAME`/`NS`/`MX`/`TXT`/`SRV`/`CAA`/`PTR`].
        :param pulumi.Input[str] value: The value of the DNS Record.
        :param pulumi.Input[int] port: The port number. This must be in the range [`1`-`65535`].
        :param pulumi.Input[int] priority: The priority of target DNS Record. This must be in the range [`0`-`65535`].
        :param pulumi.Input[int] ttl: The TTL value of the DNS Record.
        :param pulumi.Input[int] weight: The weight of target DNS Record. This must be in the range [`0`-`65535`].
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "type", type)
        pulumi.set(__self__, "value", value)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if priority is not None:
            pulumi.set(__self__, "priority", priority)
        if ttl is not None:
            pulumi.set(__self__, "ttl", ttl)
        if weight is not None:
            pulumi.set(__self__, "weight", weight)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The name of the DNS Record. The length of this value must be in the range [`1`-`64`].
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        The type of DNS Record. This must be one of [`A`/`AAAA`/`ALIAS`/`CNAME`/`NS`/`MX`/`TXT`/`SRV`/`CAA`/`PTR`].
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def value(self) -> pulumi.Input[str]:
        """
        The value of the DNS Record.
        """
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: pulumi.Input[str]):
        pulumi.set(self, "value", value)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """
        The port number. This must be in the range [`1`-`65535`].
        """
        return pulumi.get(self, "port")

    @port.setter
    def port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", value)

    @property
    @pulumi.getter
    def priority(self) -> Optional[pulumi.Input[int]]:
        """
        The priority of target DNS Record. This must be in the range [`0`-`65535`].
        """
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "priority", value)

    @property
    @pulumi.getter
    def ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The TTL value of the DNS Record.
        """
        return pulumi.get(self, "ttl")

    @ttl.setter
    def ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "ttl", value)

    @property
    @pulumi.getter
    def weight(self) -> Optional[pulumi.Input[int]]:
        """
        The weight of target DNS Record. This must be in the range [`0`-`65535`].
        """
        return pulumi.get(self, "weight")

    @weight.setter
    def weight(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "weight", value)
@pulumi.input_type
class DatabaseBackupArgs:
    """Backup schedule settings for a Database appliance."""

    def __init__(__self__, *,
                 time: Optional[pulumi.Input[str]] = None,
                 weekdays: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] time: The time to take backup. This must be formatted with `HH:mm`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] weekdays: A list of weekdays on which backups are taken. The values in the list must be in [`sun`/`mon`/`tue`/`wed`/`thu`/`fri`/`sat`].
        """
        if time is not None:
            pulumi.set(__self__, "time", time)
        if weekdays is not None:
            pulumi.set(__self__, "weekdays", weekdays)

    @property
    @pulumi.getter
    def time(self) -> Optional[pulumi.Input[str]]:
        """
        The time to take backup. This must be formatted with `HH:mm`.
        """
        return pulumi.get(self, "time")

    @time.setter
    def time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time", value)

    @property
    @pulumi.getter
    def weekdays(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list of weekdays on which backups are taken. The values in the list must be in [`sun`/`mon`/`tue`/`wed`/`thu`/`fri`/`sat`].
        """
        return pulumi.get(self, "weekdays")

    @weekdays.setter
    def weekdays(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "weekdays", value)
@pulumi.input_type
class DatabaseNetworkInterfaceArgs:
    """Network interface settings of a Database appliance."""

    def __init__(__self__, *,
                 gateway: pulumi.Input[str],
                 ip_address: pulumi.Input[str],
                 netmask: pulumi.Input[int],
                 switch_id: pulumi.Input[str],
                 port: Optional[pulumi.Input[int]] = None,
                 source_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] gateway: The IP address of the gateway used by Database.
        :param pulumi.Input[str] ip_address: The IP address to assign to the Database.
        :param pulumi.Input[int] netmask: The bit length of the subnet to assign to the Database. This must be in the range [`8`-`29`].
        :param pulumi.Input[str] switch_id: The id of the switch to which the Database connects.
        :param pulumi.Input[int] port: The number of the listening port. This must be in the range [`1024`-`65535`].
        :param pulumi.Input[Sequence[pulumi.Input[str]]] source_ranges: The range of source IP addresses that are allowed to access the Database via the network.
        """
        pulumi.set(__self__, "gateway", gateway)
        pulumi.set(__self__, "ip_address", ip_address)
        pulumi.set(__self__, "netmask", netmask)
        pulumi.set(__self__, "switch_id", switch_id)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if source_ranges is not None:
            pulumi.set(__self__, "source_ranges", source_ranges)

    @property
    @pulumi.getter
    def gateway(self) -> pulumi.Input[str]:
        """
        The IP address of the gateway used by Database.
        """
        return pulumi.get(self, "gateway")

    @gateway.setter
    def gateway(self, value: pulumi.Input[str]):
        pulumi.set(self, "gateway", value)

    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Input[str]:
        """
        The IP address to assign to the Database.
        """
        return pulumi.get(self, "ip_address")

    @ip_address.setter
    def ip_address(self, value: pulumi.Input[str]):
        pulumi.set(self, "ip_address", value)

    @property
    @pulumi.getter
    def netmask(self) -> pulumi.Input[int]:
        """
        The bit length of the subnet to assign to the Database. This must be in the range [`8`-`29`].
        """
        return pulumi.get(self, "netmask")

    @netmask.setter
    def netmask(self, value: pulumi.Input[int]):
        pulumi.set(self, "netmask", value)

    @property
    @pulumi.getter(name="switchId")
    def switch_id(self) -> pulumi.Input[str]:
        """
        The id of the switch to which the Database connects.
        """
        return pulumi.get(self, "switch_id")

    @switch_id.setter
    def switch_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "switch_id", value)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """
        The number of the listening port. This must be in the range [`1024`-`65535`].
        """
        return pulumi.get(self, "port")

    @port.setter
    def port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", value)

    @property
    @pulumi.getter(name="sourceRanges")
    def source_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The range of source IP addresses that are allowed to access the Database via the network.
        """
        return pulumi.get(self, "source_ranges")

    @source_ranges.setter
    def source_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "source_ranges", value)
@pulumi.input_type
class DatabaseReadReplicaNetworkInterfaceArgs:
    """Network interface settings of a read-replica database; unspecified values inherit from the master."""

    def __init__(__self__, *,
                 ip_address: pulumi.Input[str],
                 gateway: Optional[pulumi.Input[str]] = None,
                 netmask: Optional[pulumi.Input[int]] = None,
                 source_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 switch_id: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] ip_address: The IP address to assign to the read-replica database.
        :param pulumi.Input[str] gateway: The IP address of the gateway used by read-replica database. If `gateway` isn't specified, it will be set to the same value of the master database.
        :param pulumi.Input[int] netmask: The bit length of the subnet to assign to the read-replica database. This must be in the range [`8`-`29`]. If `netmask` isn't specified, it will be set to the same value of the master database.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] source_ranges: The range of source IP addresses that are allowed to access the read-replica database via the network.
        :param pulumi.Input[str] switch_id: The id of the switch to which the read-replica database connects. If `switch_id` isn't specified, it will be set to the same value of the master database.
        """
        pulumi.set(__self__, "ip_address", ip_address)
        if gateway is not None:
            pulumi.set(__self__, "gateway", gateway)
        if netmask is not None:
            pulumi.set(__self__, "netmask", netmask)
        if source_ranges is not None:
            pulumi.set(__self__, "source_ranges", source_ranges)
        if switch_id is not None:
            pulumi.set(__self__, "switch_id", switch_id)

    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Input[str]:
        """
        The IP address to assign to the read-replica database.
        """
        return pulumi.get(self, "ip_address")

    @ip_address.setter
    def ip_address(self, value: pulumi.Input[str]):
        pulumi.set(self, "ip_address", value)

    @property
    @pulumi.getter
    def gateway(self) -> Optional[pulumi.Input[str]]:
        """
        The IP address of the gateway used by read-replica database. If `gateway` isn't specified, it will be set to the same value of the master database.
        """
        return pulumi.get(self, "gateway")

    @gateway.setter
    def gateway(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "gateway", value)

    @property
    @pulumi.getter
    def netmask(self) -> Optional[pulumi.Input[int]]:
        """
        The bit length of the subnet to assign to the read-replica database. This must be in the range [`8`-`29`]. If `netmask` isn't specified, it will be set to the same value of the master database.
        """
        return pulumi.get(self, "netmask")

    @netmask.setter
    def netmask(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "netmask", value)

    @property
    @pulumi.getter(name="sourceRanges")
    def source_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The range of source IP addresses that are allowed to access the read-replica database via the network.
        """
        return pulumi.get(self, "source_ranges")

    @source_ranges.setter
    def source_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "source_ranges", value)

    @property
    @pulumi.getter(name="switchId")
    def switch_id(self) -> Optional[pulumi.Input[str]]:
        """
        The id of the switch to which the read-replica database connects. If `switch_id` isn't specified, it will be set to the same value of the master database.
        """
        return pulumi.get(self, "switch_id")

    @switch_id.setter
    def switch_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "switch_id", value)
@pulumi.input_type
class GSLBHealthCheckArgs:
    """Health-check settings of a GSLB."""

    def __init__(__self__, *,
                 protocol: pulumi.Input[str],
                 delay_loop: Optional[pulumi.Input[int]] = None,
                 host_header: Optional[pulumi.Input[str]] = None,
                 path: Optional[pulumi.Input[str]] = None,
                 port: Optional[pulumi.Input[int]] = None,
                 status: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] protocol: The protocol used for health checks. This must be one of [`http`/`https`/`tcp`/`ping`].
        :param pulumi.Input[int] delay_loop: The interval in seconds between checks. This must be in the range [`10`-`60`].
        :param pulumi.Input[str] host_header: The value of the Host header sent when checking by HTTP/HTTPS.
        :param pulumi.Input[str] path: The path used when checking by HTTP/HTTPS.
        :param pulumi.Input[int] port: The port number used when checking by TCP.
        :param pulumi.Input[str] status: The response-code to expect when checking by HTTP/HTTPS.
        """
        pulumi.set(__self__, "protocol", protocol)
        if delay_loop is not None:
            pulumi.set(__self__, "delay_loop", delay_loop)
        if host_header is not None:
            pulumi.set(__self__, "host_header", host_header)
        if path is not None:
            pulumi.set(__self__, "path", path)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if status is not None:
            pulumi.set(__self__, "status", status)

    @property
    @pulumi.getter
    def protocol(self) -> pulumi.Input[str]:
        """
        The protocol used for health checks. This must be one of [`http`/`https`/`tcp`/`ping`].
        """
        return pulumi.get(self, "protocol")

    @protocol.setter
    def protocol(self, value: pulumi.Input[str]):
        pulumi.set(self, "protocol", value)

    @property
    @pulumi.getter(name="delayLoop")
    def delay_loop(self) -> Optional[pulumi.Input[int]]:
        """
        The interval in seconds between checks. This must be in the range [`10`-`60`].
        """
        return pulumi.get(self, "delay_loop")

    @delay_loop.setter
    def delay_loop(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "delay_loop", value)

    @property
    @pulumi.getter(name="hostHeader")
    def host_header(self) -> Optional[pulumi.Input[str]]:
        """
        The value of the Host header sent when checking by HTTP/HTTPS.
        """
        return pulumi.get(self, "host_header")

    @host_header.setter
    def host_header(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host_header", value)

    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        """
        The path used when checking by HTTP/HTTPS.
        """
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """
        The port number used when checking by TCP.
        """
        return pulumi.get(self, "port")

    @port.setter
    def port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        The response-code to expect when checking by HTTP/HTTPS.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)
@pulumi.input_type
class GSLBServerArgs:
    """A single real server registered as a load-balancing destination of a GSLB."""

    def __init__(__self__, *,
                 ip_address: pulumi.Input[str],
                 enabled: Optional[pulumi.Input[bool]] = None,
                 weight: Optional[pulumi.Input[int]] = None):
        """
        :param pulumi.Input[str] ip_address: The IP address of the server.
        :param pulumi.Input[bool] enabled: The flag to enable as destination of load balancing.
        :param pulumi.Input[int] weight: The weight used when weighted load balancing is enabled. This must be in the range [`1`-`10000`].
        """
        pulumi.set(__self__, "ip_address", ip_address)
        # Forward only the optional settings that were actually supplied.
        for prop_name, supplied in (("enabled", enabled), ("weight", weight)):
            if supplied is not None:
                pulumi.set(__self__, prop_name, supplied)

    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Input[str]:
        """The IP address of the server."""
        return pulumi.get(self, "ip_address")

    @ip_address.setter
    def ip_address(self, value: pulumi.Input[str]):
        pulumi.set(self, "ip_address", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """The flag to enable as destination of load balancing."""
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter
    def weight(self) -> Optional[pulumi.Input[int]]:
        """The weight used when weighted load balancing is enabled. This must be in the range [`1`-`10000`]."""
        return pulumi.get(self, "weight")

    @weight.setter
    def weight(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "weight", value)
@pulumi.input_type
class LoadBalancerNetworkInterfaceArgs:
    """Network interface settings of a LoadBalancer appliance."""

    def __init__(__self__, *,
                 ip_addresses: pulumi.Input[Sequence[pulumi.Input[str]]],
                 netmask: pulumi.Input[int],
                 switch_id: pulumi.Input[str],
                 vrid: pulumi.Input[int],
                 gateway: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[Sequence[pulumi.Input[str]]] ip_addresses: A list of IP addresses to assign to the LoadBalancer.
        :param pulumi.Input[int] netmask: The bit length of the subnet assigned to the LoadBalancer. This must be in the range [`8`-`29`].
        :param pulumi.Input[str] switch_id: The id of the switch to which the LoadBalancer connects.
        :param pulumi.Input[int] vrid: The Virtual Router Identifier.
        :param pulumi.Input[str] gateway: The IP address of the gateway used by LoadBalancer.
        """
        pulumi.set(__self__, "ip_addresses", ip_addresses)
        pulumi.set(__self__, "netmask", netmask)
        pulumi.set(__self__, "switch_id", switch_id)
        pulumi.set(__self__, "vrid", vrid)
        if gateway is not None:
            pulumi.set(__self__, "gateway", gateway)

    @property
    @pulumi.getter(name="ipAddresses")
    def ip_addresses(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        A list of IP addresses to assign to the LoadBalancer.
        """
        return pulumi.get(self, "ip_addresses")

    @ip_addresses.setter
    def ip_addresses(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "ip_addresses", value)

    @property
    @pulumi.getter
    def netmask(self) -> pulumi.Input[int]:
        """
        The bit length of the subnet assigned to the LoadBalancer. This must be in the range [`8`-`29`].
        """
        return pulumi.get(self, "netmask")

    @netmask.setter
    def netmask(self, value: pulumi.Input[int]):
        pulumi.set(self, "netmask", value)

    @property
    @pulumi.getter(name="switchId")
    def switch_id(self) -> pulumi.Input[str]:
        """
        The id of the switch to which the LoadBalancer connects.
        """
        return pulumi.get(self, "switch_id")

    @switch_id.setter
    def switch_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "switch_id", value)

    @property
    @pulumi.getter
    def vrid(self) -> pulumi.Input[int]:
        """
        The Virtual Router Identifier.
        """
        return pulumi.get(self, "vrid")

    @vrid.setter
    def vrid(self, value: pulumi.Input[int]):
        pulumi.set(self, "vrid", value)

    @property
    @pulumi.getter
    def gateway(self) -> Optional[pulumi.Input[str]]:
        """
        The IP address of the gateway used by LoadBalancer.
        """
        return pulumi.get(self, "gateway")

    @gateway.setter
    def gateway(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "gateway", value)
@pulumi.input_type
class LoadBalancerVipArgs:
    def __init__(__self__, *,
                 port: pulumi.Input[int],
                 vip: pulumi.Input[str],
                 delay_loop: Optional[pulumi.Input[int]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 servers: Optional[pulumi.Input[Sequence[pulumi.Input['LoadBalancerVipServerArgs']]]] = None,
                 sorry_server: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[int] port: The target port number for load-balancing. This must be in the range [`1`-`65535`].
        :param pulumi.Input[str] vip: The virtual IP address.
        :param pulumi.Input[int] delay_loop: The interval in seconds between checks. This must be in the range [`10`-`2147483647`].
        :param pulumi.Input[str] description: The description of the VIP. The length of this value must be in the range [`1`-`512`].
        :param pulumi.Input[Sequence[pulumi.Input['LoadBalancerVipServerArgs']]] servers: One or more `server` blocks as defined below.
        :param pulumi.Input[str] sorry_server: The IP address of the SorryServer. This will be used when all servers under this VIP are down.
        """
        # Required arguments are always recorded.
        pulumi.set(__self__, "port", port)
        pulumi.set(__self__, "vip", vip)
        # Optional arguments are recorded only when supplied.
        optionals = {"delay_loop": delay_loop,
                     "description": description,
                     "servers": servers,
                     "sorry_server": sorry_server}
        for key, arg in optionals.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def port(self) -> pulumi.Input[int]:
        """The target port number for load-balancing. This must be in the range [`1`-`65535`]."""
        return pulumi.get(self, "port")

    @port.setter
    def port(self, value: pulumi.Input[int]):
        pulumi.set(self, "port", value)

    @property
    @pulumi.getter
    def vip(self) -> pulumi.Input[str]:
        """The virtual IP address."""
        return pulumi.get(self, "vip")

    @vip.setter
    def vip(self, value: pulumi.Input[str]):
        pulumi.set(self, "vip", value)

    @property
    @pulumi.getter(name="delayLoop")
    def delay_loop(self) -> Optional[pulumi.Input[int]]:
        """The interval in seconds between checks. This must be in the range [`10`-`2147483647`]."""
        return pulumi.get(self, "delay_loop")

    @delay_loop.setter
    def delay_loop(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "delay_loop", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """The description of the VIP. The length of this value must be in the range [`1`-`512`]."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def servers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LoadBalancerVipServerArgs']]]]:
        """One or more `server` blocks as defined below."""
        return pulumi.get(self, "servers")

    @servers.setter
    def servers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['LoadBalancerVipServerArgs']]]]):
        pulumi.set(self, "servers", value)

    @property
    @pulumi.getter(name="sorryServer")
    def sorry_server(self) -> Optional[pulumi.Input[str]]:
        """The IP address of the SorryServer. This will be used when all servers under this VIP are down."""
        return pulumi.get(self, "sorry_server")

    @sorry_server.setter
    def sorry_server(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sorry_server", value)
@pulumi.input_type
class LoadBalancerVipServerArgs:
    def __init__(__self__, *,
                 ip_address: pulumi.Input[str],
                 protocol: pulumi.Input[str],
                 enabled: Optional[pulumi.Input[bool]] = None,
                 path: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] ip_address: The IP address of the destination server.
        :param pulumi.Input[str] protocol: The protocol used for health checks. This must be one of [`http`/`https`/`tcp`/`ping`].
        :param pulumi.Input[bool] enabled: The flag to enable as destination of load balancing.
        :param pulumi.Input[str] path: The path used when checking by HTTP/HTTPS.
        :param pulumi.Input[str] status: The response code to expect when checking by HTTP/HTTPS.
        """
        # Required arguments are always recorded.
        pulumi.set(__self__, "ip_address", ip_address)
        pulumi.set(__self__, "protocol", protocol)
        # Optional arguments are recorded only when supplied.
        optionals = {"enabled": enabled, "path": path, "status": status}
        for key, arg in optionals.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Input[str]:
        """The IP address of the destination server."""
        return pulumi.get(self, "ip_address")

    @ip_address.setter
    def ip_address(self, value: pulumi.Input[str]):
        pulumi.set(self, "ip_address", value)

    @property
    @pulumi.getter
    def protocol(self) -> pulumi.Input[str]:
        """The protocol used for health checks. This must be one of [`http`/`https`/`tcp`/`ping`]."""
        return pulumi.get(self, "protocol")

    @protocol.setter
    def protocol(self, value: pulumi.Input[str]):
        pulumi.set(self, "protocol", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """The flag to enable as destination of load balancing."""
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        """The path used when checking by HTTP/HTTPS."""
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """The response code to expect when checking by HTTP/HTTPS."""
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)
@pulumi.input_type
class LocalRouterNetworkInterfaceArgs:
    def __init__(__self__, *,
                 ip_addresses: pulumi.Input[Sequence[pulumi.Input[str]]],
                 netmask: pulumi.Input[int],
                 vip: pulumi.Input[str],
                 vrid: pulumi.Input[int]):
        """
        :param pulumi.Input[Sequence[pulumi.Input[str]]] ip_addresses: A list of IP address to assign to the LocalRouter.
        :param pulumi.Input[int] netmask: The bit length of the subnet assigned to the LocalRouter. This must be in the range [`8`-`29`].
        :param pulumi.Input[str] vip: The virtual IP address.
        :param pulumi.Input[int] vrid: The Virtual Router Identifier.
        """
        # Every argument is required, so all of them are recorded unconditionally.
        for key, arg in (("ip_addresses", ip_addresses),
                         ("netmask", netmask),
                         ("vip", vip),
                         ("vrid", vrid)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter(name="ipAddresses")
    def ip_addresses(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """A list of IP address to assign to the LocalRouter."""
        return pulumi.get(self, "ip_addresses")

    @ip_addresses.setter
    def ip_addresses(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "ip_addresses", value)

    @property
    @pulumi.getter
    def netmask(self) -> pulumi.Input[int]:
        """The bit length of the subnet assigned to the LocalRouter. This must be in the range [`8`-`29`]."""
        return pulumi.get(self, "netmask")

    @netmask.setter
    def netmask(self, value: pulumi.Input[int]):
        pulumi.set(self, "netmask", value)

    @property
    @pulumi.getter
    def vip(self) -> pulumi.Input[str]:
        """The virtual IP address."""
        return pulumi.get(self, "vip")

    @vip.setter
    def vip(self, value: pulumi.Input[str]):
        pulumi.set(self, "vip", value)

    @property
    @pulumi.getter
    def vrid(self) -> pulumi.Input[int]:
        """The Virtual Router Identifier."""
        return pulumi.get(self, "vrid")

    @vrid.setter
    def vrid(self, value: pulumi.Input[int]):
        pulumi.set(self, "vrid", value)
@pulumi.input_type
class LocalRouterPeerArgs:
    def __init__(__self__, *,
                 peer_id: pulumi.Input[str],
                 secret_key: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None):
        """
        :param pulumi.Input[str] peer_id: The ID of the peer LocalRouter.
        :param pulumi.Input[str] secret_key: The secret key of the peer LocalRouter.
        :param pulumi.Input[str] description: The description of the LocalRouter. The length of this value must be in the range [`1`-`512`].
        :param pulumi.Input[bool] enabled: The flag to enable the LocalRouter.
        """
        # Required arguments are always recorded.
        pulumi.set(__self__, "peer_id", peer_id)
        pulumi.set(__self__, "secret_key", secret_key)
        # Optional arguments are recorded only when supplied.
        for key, arg in (("description", description), ("enabled", enabled)):
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter(name="peerId")
    def peer_id(self) -> pulumi.Input[str]:
        """The ID of the peer LocalRouter."""
        return pulumi.get(self, "peer_id")

    @peer_id.setter
    def peer_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "peer_id", value)

    @property
    @pulumi.getter(name="secretKey")
    def secret_key(self) -> pulumi.Input[str]:
        """The secret key of the peer LocalRouter."""
        return pulumi.get(self, "secret_key")

    @secret_key.setter
    def secret_key(self, value: pulumi.Input[str]):
        pulumi.set(self, "secret_key", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """The description of the LocalRouter. The length of this value must be in the range [`1`-`512`]."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """The flag to enable the LocalRouter."""
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)
@pulumi.input_type
class LocalRouterStaticRouteArgs:
    def __init__(__self__, *,
                 next_hop: pulumi.Input[str],
                 prefix: pulumi.Input[str]):
        """
        :param pulumi.Input[str] next_hop: The IP address of the next hop.
        :param pulumi.Input[str] prefix: The CIDR block of destination.
        """
        # Both arguments are required, so both are recorded unconditionally.
        for key, arg in (("next_hop", next_hop), ("prefix", prefix)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter(name="nextHop")
    def next_hop(self) -> pulumi.Input[str]:
        """The IP address of the next hop."""
        return pulumi.get(self, "next_hop")

    @next_hop.setter
    def next_hop(self, value: pulumi.Input[str]):
        pulumi.set(self, "next_hop", value)

    @property
    @pulumi.getter
    def prefix(self) -> pulumi.Input[str]:
        """The CIDR block of destination."""
        return pulumi.get(self, "prefix")

    @prefix.setter
    def prefix(self, value: pulumi.Input[str]):
        pulumi.set(self, "prefix", value)
@pulumi.input_type
class LocalRouterSwitchArgs:
    def __init__(__self__, *,
                 code: pulumi.Input[str],
                 zone_id: pulumi.Input[str],
                 category: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] code: The resource ID of the Switch.
        :param pulumi.Input[str] zone_id: The id of the Zone.
        :param pulumi.Input[str] category: The category name of connected services (e.g. `cloud`, `vps`).
        """
        # Required arguments are always recorded.
        for key, arg in (("code", code), ("zone_id", zone_id)):
            pulumi.set(__self__, key, arg)
        # The category is optional and recorded only when supplied.
        if category is not None:
            pulumi.set(__self__, "category", category)

    @property
    @pulumi.getter
    def code(self) -> pulumi.Input[str]:
        """The resource ID of the Switch."""
        return pulumi.get(self, "code")

    @code.setter
    def code(self, value: pulumi.Input[str]):
        pulumi.set(self, "code", value)

    @property
    @pulumi.getter(name="zoneId")
    def zone_id(self) -> pulumi.Input[str]:
        """The id of the Zone."""
        return pulumi.get(self, "zone_id")

    @zone_id.setter
    def zone_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "zone_id", value)

    @property
    @pulumi.getter
    def category(self) -> Optional[pulumi.Input[str]]:
        """The category name of connected services (e.g. `cloud`, `vps`)."""
        return pulumi.get(self, "category")

    @category.setter
    def category(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "category", value)
@pulumi.input_type
class MobileGatewayPrivateNetworkInterfaceArgs:
    def __init__(__self__, *,
                 ip_address: pulumi.Input[str],
                 netmask: pulumi.Input[int],
                 switch_id: pulumi.Input[str]):
        """
        :param pulumi.Input[str] ip_address: The IP address to assign to the MobileGateway.
        :param pulumi.Input[int] netmask: The bit length of the subnet to assign to the MobileGateway. This must be in the range [`8`-`29`].
        :param pulumi.Input[str] switch_id: The id of the switch to which the MobileGateway connects.
        """
        # Every argument is required, so all of them are recorded unconditionally.
        for key, arg in (("ip_address", ip_address),
                         ("netmask", netmask),
                         ("switch_id", switch_id)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Input[str]:
        """The IP address to assign to the MobileGateway."""
        return pulumi.get(self, "ip_address")

    @ip_address.setter
    def ip_address(self, value: pulumi.Input[str]):
        pulumi.set(self, "ip_address", value)

    @property
    @pulumi.getter
    def netmask(self) -> pulumi.Input[int]:
        """The bit length of the subnet to assign to the MobileGateway. This must be in the range [`8`-`29`]."""
        return pulumi.get(self, "netmask")

    @netmask.setter
    def netmask(self, value: pulumi.Input[int]):
        pulumi.set(self, "netmask", value)

    @property
    @pulumi.getter(name="switchId")
    def switch_id(self) -> pulumi.Input[str]:
        """The id of the switch to which the MobileGateway connects."""
        return pulumi.get(self, "switch_id")

    @switch_id.setter
    def switch_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "switch_id", value)
@pulumi.input_type
class MobileGatewaySimArgs:
    def __init__(__self__, *,
                 ip_address: pulumi.Input[str],
                 sim_id: pulumi.Input[str]):
        """
        :param pulumi.Input[str] ip_address: The IP address to assign to the SIM.
        :param pulumi.Input[str] sim_id: The id of the Switch connected to the MobileGateway.
        """
        # Both arguments are required, so both are recorded unconditionally.
        for key, arg in (("ip_address", ip_address), ("sim_id", sim_id)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Input[str]:
        """The IP address to assign to the SIM."""
        return pulumi.get(self, "ip_address")

    @ip_address.setter
    def ip_address(self, value: pulumi.Input[str]):
        pulumi.set(self, "ip_address", value)

    @property
    @pulumi.getter(name="simId")
    def sim_id(self) -> pulumi.Input[str]:
        """The id of the Switch connected to the MobileGateway."""
        return pulumi.get(self, "sim_id")

    @sim_id.setter
    def sim_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "sim_id", value)
@pulumi.input_type
class MobileGatewaySimRouteArgs:
    def __init__(__self__, *,
                 prefix: pulumi.Input[str],
                 sim_id: pulumi.Input[str]):
        """
        :param pulumi.Input[str] prefix: The destination network prefix used by the sim routing. This must be specified by CIDR block formatted string.
        :param pulumi.Input[str] sim_id: The id of the routing destination SIM.
        """
        # Both arguments are required, so both are recorded unconditionally.
        for key, arg in (("prefix", prefix), ("sim_id", sim_id)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def prefix(self) -> pulumi.Input[str]:
        """The destination network prefix used by the sim routing. This must be specified by CIDR block formatted string."""
        return pulumi.get(self, "prefix")

    @prefix.setter
    def prefix(self, value: pulumi.Input[str]):
        pulumi.set(self, "prefix", value)

    @property
    @pulumi.getter(name="simId")
    def sim_id(self) -> pulumi.Input[str]:
        """The id of the routing destination SIM."""
        return pulumi.get(self, "sim_id")

    @sim_id.setter
    def sim_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "sim_id", value)
@pulumi.input_type
class MobileGatewayStaticRouteArgs:
    def __init__(__self__, *,
                 next_hop: pulumi.Input[str],
                 prefix: pulumi.Input[str]):
        """
        :param pulumi.Input[str] next_hop: The IP address of next hop.
        :param pulumi.Input[str] prefix: The destination network prefix used by static routing. This must be specified by CIDR block formatted string.
        """
        # Both arguments are required, so both are recorded unconditionally.
        for key, arg in (("next_hop", next_hop), ("prefix", prefix)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter(name="nextHop")
    def next_hop(self) -> pulumi.Input[str]:
        """The IP address of next hop."""
        return pulumi.get(self, "next_hop")

    @next_hop.setter
    def next_hop(self, value: pulumi.Input[str]):
        pulumi.set(self, "next_hop", value)

    @property
    @pulumi.getter
    def prefix(self) -> pulumi.Input[str]:
        """The destination network prefix used by static routing. This must be specified by CIDR block formatted string."""
        return pulumi.get(self, "prefix")

    @prefix.setter
    def prefix(self, value: pulumi.Input[str]):
        pulumi.set(self, "prefix", value)
@pulumi.input_type
class MobileGatewayTrafficControlArgs:
    def __init__(__self__, *,
                 quota: pulumi.Input[int],
                 auto_traffic_shaping: Optional[pulumi.Input[bool]] = None,
                 band_width_limit: Optional[pulumi.Input[int]] = None,
                 enable_email: Optional[pulumi.Input[bool]] = None,
                 enable_slack: Optional[pulumi.Input[bool]] = None,
                 slack_webhook: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[int] quota: The threshold of monthly traffic usage to enable to the traffic shaping.
        :param pulumi.Input[bool] auto_traffic_shaping: The flag to enable the traffic shaping.
        :param pulumi.Input[int] band_width_limit: The bandwidth allowed when the traffic shaping is enabled.
        :param pulumi.Input[bool] enable_email: The flag to enable email notification when the traffic shaping is enabled.
        :param pulumi.Input[bool] enable_slack: The flag to enable slack notification when the traffic shaping is enabled.
        :param pulumi.Input[str] slack_webhook: The webhook URL used when sends notification. It will only used when `enable_slack` is set `true`.
        """
        # The quota is required and always recorded.
        pulumi.set(__self__, "quota", quota)
        # Optional arguments are recorded only when supplied.
        optionals = {"auto_traffic_shaping": auto_traffic_shaping,
                     "band_width_limit": band_width_limit,
                     "enable_email": enable_email,
                     "enable_slack": enable_slack,
                     "slack_webhook": slack_webhook}
        for key, arg in optionals.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def quota(self) -> pulumi.Input[int]:
        """The threshold of monthly traffic usage to enable to the traffic shaping."""
        return pulumi.get(self, "quota")

    @quota.setter
    def quota(self, value: pulumi.Input[int]):
        pulumi.set(self, "quota", value)

    @property
    @pulumi.getter(name="autoTrafficShaping")
    def auto_traffic_shaping(self) -> Optional[pulumi.Input[bool]]:
        """The flag to enable the traffic shaping."""
        return pulumi.get(self, "auto_traffic_shaping")

    @auto_traffic_shaping.setter
    def auto_traffic_shaping(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "auto_traffic_shaping", value)

    @property
    @pulumi.getter(name="bandWidthLimit")
    def band_width_limit(self) -> Optional[pulumi.Input[int]]:
        """The bandwidth allowed when the traffic shaping is enabled."""
        return pulumi.get(self, "band_width_limit")

    @band_width_limit.setter
    def band_width_limit(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "band_width_limit", value)

    @property
    @pulumi.getter(name="enableEmail")
    def enable_email(self) -> Optional[pulumi.Input[bool]]:
        """The flag to enable email notification when the traffic shaping is enabled."""
        return pulumi.get(self, "enable_email")

    @enable_email.setter
    def enable_email(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_email", value)

    @property
    @pulumi.getter(name="enableSlack")
    def enable_slack(self) -> Optional[pulumi.Input[bool]]:
        """The flag to enable slack notification when the traffic shaping is enabled."""
        return pulumi.get(self, "enable_slack")

    @enable_slack.setter
    def enable_slack(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable_slack", value)

    @property
    @pulumi.getter(name="slackWebhook")
    def slack_webhook(self) -> Optional[pulumi.Input[str]]:
        """The webhook URL used when sends notification. It will only used when `enable_slack` is set `true`."""
        return pulumi.get(self, "slack_webhook")

    @slack_webhook.setter
    def slack_webhook(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "slack_webhook", value)
@pulumi.input_type
class NFSNetworkInterfaceArgs:
    def __init__(__self__, *,
                 ip_address: pulumi.Input[str],
                 netmask: pulumi.Input[int],
                 switch_id: pulumi.Input[str],
                 gateway: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] ip_address: The IP address to assign to the NFS.
        :param pulumi.Input[int] netmask: The bit length of the subnet to assign to the NFS. This must be in the range [`8`-`29`].
        :param pulumi.Input[str] switch_id: The id of the switch to which the NFS connects.
        :param pulumi.Input[str] gateway: The IP address of the gateway used by NFS.
        """
        # Required arguments are always recorded.
        for key, arg in (("ip_address", ip_address),
                         ("netmask", netmask),
                         ("switch_id", switch_id)):
            pulumi.set(__self__, key, arg)
        # The gateway is optional and recorded only when supplied.
        if gateway is not None:
            pulumi.set(__self__, "gateway", gateway)

    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Input[str]:
        """The IP address to assign to the NFS."""
        return pulumi.get(self, "ip_address")

    @ip_address.setter
    def ip_address(self, value: pulumi.Input[str]):
        pulumi.set(self, "ip_address", value)

    @property
    @pulumi.getter
    def netmask(self) -> pulumi.Input[int]:
        """The bit length of the subnet to assign to the NFS. This must be in the range [`8`-`29`]."""
        return pulumi.get(self, "netmask")

    @netmask.setter
    def netmask(self, value: pulumi.Input[int]):
        pulumi.set(self, "netmask", value)

    @property
    @pulumi.getter(name="switchId")
    def switch_id(self) -> pulumi.Input[str]:
        """The id of the switch to which the NFS connects."""
        return pulumi.get(self, "switch_id")

    @switch_id.setter
    def switch_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "switch_id", value)

    @property
    @pulumi.getter
    def gateway(self) -> Optional[pulumi.Input[str]]:
        """The IP address of the gateway used by NFS."""
        return pulumi.get(self, "gateway")

    @gateway.setter
    def gateway(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "gateway", value)
@pulumi.input_type
class PacketFilterExpressionArgs:
    def __init__(__self__, *,
                 protocol: pulumi.Input[str],
                 allow: Optional[pulumi.Input[bool]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 destination_port: Optional[pulumi.Input[str]] = None,
                 source_network: Optional[pulumi.Input[str]] = None,
                 source_port: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] protocol: The protocol used for filtering. This must be one of [`http`/`https`/`tcp`/`udp`/`icmp`/`fragment`/`ip`].
        :param pulumi.Input[bool] allow: The flag to allow the packet through the filter.
        :param pulumi.Input[str] description: The description of the packetFilter. The length of this value must be in the range [`1`-`512`].
        :param pulumi.Input[str] destination_port: A destination port number or port range used for filtering (e.g. `1024`, `1024-2048`).
        :param pulumi.Input[str] source_network: A source IP address or CIDR block used for filtering (e.g. `192.0.2.1`, `192.0.2.0/24`).
        :param pulumi.Input[str] source_port: A source port number or port range used for filtering (e.g. `1024`, `1024-2048`).
        """
        # The protocol is required and always recorded.
        pulumi.set(__self__, "protocol", protocol)
        # Optional arguments are recorded only when supplied.
        optionals = {"allow": allow,
                     "description": description,
                     "destination_port": destination_port,
                     "source_network": source_network,
                     "source_port": source_port}
        for key, arg in optionals.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def protocol(self) -> pulumi.Input[str]:
        """The protocol used for filtering. This must be one of [`http`/`https`/`tcp`/`udp`/`icmp`/`fragment`/`ip`]."""
        return pulumi.get(self, "protocol")

    @protocol.setter
    def protocol(self, value: pulumi.Input[str]):
        pulumi.set(self, "protocol", value)

    @property
    @pulumi.getter
    def allow(self) -> Optional[pulumi.Input[bool]]:
        """The flag to allow the packet through the filter."""
        return pulumi.get(self, "allow")

    @allow.setter
    def allow(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "allow", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """The description of the packetFilter. The length of this value must be in the range [`1`-`512`]."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="destinationPort")
    def destination_port(self) -> Optional[pulumi.Input[str]]:
        """A destination port number or port range used for filtering (e.g. `1024`, `1024-2048`)."""
        return pulumi.get(self, "destination_port")

    @destination_port.setter
    def destination_port(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "destination_port", value)

    @property
    @pulumi.getter(name="sourceNetwork")
    def source_network(self) -> Optional[pulumi.Input[str]]:
        """A source IP address or CIDR block used for filtering (e.g. `192.0.2.1`, `192.0.2.0/24`)."""
        return pulumi.get(self, "source_network")

    @source_network.setter
    def source_network(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_network", value)

    @property
    @pulumi.getter(name="sourcePort")
    def source_port(self) -> Optional[pulumi.Input[str]]:
        """A source port number or port range used for filtering (e.g. `1024`, `1024-2048`)."""
        return pulumi.get(self, "source_port")

    @source_port.setter
    def source_port(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_port", value)
@pulumi.input_type
class PacketFilterRuleExpressionArgs:
    def __init__(__self__, *,
                 protocol: pulumi.Input[str],
                 allow: Optional[pulumi.Input[bool]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 destination_port: Optional[pulumi.Input[str]] = None,
                 source_network: Optional[pulumi.Input[str]] = None,
                 source_port: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] protocol: The protocol used for filtering. This must be one of [`http`/`https`/`tcp`/`udp`/`icmp`/`fragment`/`ip`].
        :param pulumi.Input[bool] allow: The flag to allow the packet through the filter.
        :param pulumi.Input[str] description: The description of the expression.
        :param pulumi.Input[str] destination_port: A destination port number or port range used for filtering (e.g. `1024`, `1024-2048`).
        :param pulumi.Input[str] source_network: A source IP address or CIDR block used for filtering (e.g. `192.0.2.1`, `192.0.2.0/24`).
        :param pulumi.Input[str] source_port: A source port number or port range used for filtering (e.g. `1024`, `1024-2048`).
        """
        # The protocol is required and always recorded.
        pulumi.set(__self__, "protocol", protocol)
        # Optional arguments are recorded only when supplied.
        optionals = {"allow": allow,
                     "description": description,
                     "destination_port": destination_port,
                     "source_network": source_network,
                     "source_port": source_port}
        for key, arg in optionals.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def protocol(self) -> pulumi.Input[str]:
        """The protocol used for filtering. This must be one of [`http`/`https`/`tcp`/`udp`/`icmp`/`fragment`/`ip`]."""
        return pulumi.get(self, "protocol")

    @protocol.setter
    def protocol(self, value: pulumi.Input[str]):
        pulumi.set(self, "protocol", value)

    @property
    @pulumi.getter
    def allow(self) -> Optional[pulumi.Input[bool]]:
        """The flag to allow the packet through the filter."""
        return pulumi.get(self, "allow")

    @allow.setter
    def allow(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "allow", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """The description of the expression."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="destinationPort")
    def destination_port(self) -> Optional[pulumi.Input[str]]:
        """A destination port number or port range used for filtering (e.g. `1024`, `1024-2048`)."""
        return pulumi.get(self, "destination_port")

    @destination_port.setter
    def destination_port(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "destination_port", value)

    @property
    @pulumi.getter(name="sourceNetwork")
    def source_network(self) -> Optional[pulumi.Input[str]]:
        """A source IP address or CIDR block used for filtering (e.g. `192.0.2.1`, `192.0.2.0/24`)."""
        return pulumi.get(self, "source_network")

    @source_network.setter
    def source_network(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_network", value)

    @property
    @pulumi.getter(name="sourcePort")
    def source_port(self) -> Optional[pulumi.Input[str]]:
        """A source port number or port range used for filtering (e.g. `1024`, `1024-2048`)."""
        return pulumi.get(self, "source_port")

    @source_port.setter
    def source_port(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_port", value)
@pulumi.input_type
class ProxyLBACMECertificateArgs:
    def __init__(__self__, *,
                 additional_certificates: Optional[pulumi.Input[Sequence[pulumi.Input['ProxyLBACMECertificateAdditionalCertificateArgs']]]] = None,
                 common_name: Optional[pulumi.Input[str]] = None,
                 intermediate_cert: Optional[pulumi.Input[str]] = None,
                 private_key: Optional[pulumi.Input[str]] = None,
                 server_cert: Optional[pulumi.Input[str]] = None,
                 subject_alt_names: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[Sequence[pulumi.Input['ProxyLBACMECertificateAdditionalCertificateArgs']]] additional_certificates: A list of `additional_certificate` blocks as defined below.
        :param pulumi.Input[str] common_name: The FQDN used by ACME. This must set resolvable value. Changing this forces a new resource to be created.
        :param pulumi.Input[str] intermediate_cert: The intermediate certificate for a server.
        :param pulumi.Input[str] private_key: The private key for a server.
        :param pulumi.Input[str] server_cert: The certificate for a server.
        :param pulumi.Input[str] subject_alt_names: The Subject alternative names used by ACME. Changing this forces a new resource to be created.
        """
        # Every argument is optional; only supplied values are recorded.
        optionals = {"additional_certificates": additional_certificates,
                     "common_name": common_name,
                     "intermediate_cert": intermediate_cert,
                     "private_key": private_key,
                     "server_cert": server_cert,
                     "subject_alt_names": subject_alt_names}
        for key, arg in optionals.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter(name="additionalCertificates")
    def additional_certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ProxyLBACMECertificateAdditionalCertificateArgs']]]]:
        """A list of `additional_certificate` blocks as defined below."""
        return pulumi.get(self, "additional_certificates")

    @additional_certificates.setter
    def additional_certificates(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ProxyLBACMECertificateAdditionalCertificateArgs']]]]):
        pulumi.set(self, "additional_certificates", value)

    @property
    @pulumi.getter(name="commonName")
    def common_name(self) -> Optional[pulumi.Input[str]]:
        """The FQDN used by ACME. This must set resolvable value. Changing this forces a new resource to be created."""
        return pulumi.get(self, "common_name")

    @common_name.setter
    def common_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "common_name", value)

    @property
    @pulumi.getter(name="intermediateCert")
    def intermediate_cert(self) -> Optional[pulumi.Input[str]]:
        """The intermediate certificate for a server."""
        return pulumi.get(self, "intermediate_cert")

    @intermediate_cert.setter
    def intermediate_cert(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "intermediate_cert", value)

    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> Optional[pulumi.Input[str]]:
        """The private key for a server."""
        return pulumi.get(self, "private_key")

    @private_key.setter
    def private_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "private_key", value)

    @property
    @pulumi.getter(name="serverCert")
    def server_cert(self) -> Optional[pulumi.Input[str]]:
        """The certificate for a server."""
        return pulumi.get(self, "server_cert")

    @server_cert.setter
    def server_cert(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "server_cert", value)

    @property
    @pulumi.getter(name="subjectAltNames")
    def subject_alt_names(self) -> Optional[pulumi.Input[str]]:
        """The Subject alternative names used by ACME. Changing this forces a new resource to be created."""
        return pulumi.get(self, "subject_alt_names")

    @subject_alt_names.setter
    def subject_alt_names(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subject_alt_names", value)
@pulumi.input_type
class ProxyLBACMECertificateAdditionalCertificateArgs:
    def __init__(__self__, *,
                 intermediate_cert: Optional[pulumi.Input[str]] = None,
                 private_key: Optional[pulumi.Input[str]] = None,
                 server_cert: Optional[pulumi.Input[str]] = None):
        """
        One additional certificate attached to an ACME-managed ProxyLB certificate.

        :param pulumi.Input[str] intermediate_cert: The intermediate certificate for a server.
        :param pulumi.Input[str] private_key: The private key for a server.
        :param pulumi.Input[str] server_cert: The certificate for a server.
        """
        # Store only the arguments the caller actually supplied.
        for prop, supplied in (
                ("intermediate_cert", intermediate_cert),
                ("private_key", private_key),
                ("server_cert", server_cert)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter(name="intermediateCert")
    def intermediate_cert(self) -> Optional[pulumi.Input[str]]:
        """The intermediate certificate for a server."""
        return pulumi.get(self, "intermediate_cert")

    @intermediate_cert.setter
    def intermediate_cert(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "intermediate_cert", v)

    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> Optional[pulumi.Input[str]]:
        """The private key for a server."""
        return pulumi.get(self, "private_key")

    @private_key.setter
    def private_key(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "private_key", v)

    @property
    @pulumi.getter(name="serverCert")
    def server_cert(self) -> Optional[pulumi.Input[str]]:
        """The certificate for a server."""
        return pulumi.get(self, "server_cert")

    @server_cert.setter
    def server_cert(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "server_cert", v)
@pulumi.input_type
class ProxyLBBindPortArgs:
    def __init__(__self__, *,
                 proxy_mode: pulumi.Input[str],
                 port: Optional[pulumi.Input[int]] = None,
                 redirect_to_https: Optional[pulumi.Input[bool]] = None,
                 response_headers: Optional[pulumi.Input[Sequence[pulumi.Input['ProxyLBBindPortResponseHeaderArgs']]]] = None,
                 ssl_policy: Optional[pulumi.Input[str]] = None,
                 support_http2: Optional[pulumi.Input[bool]] = None):
        """
        A listening port definition for a ProxyLB.

        :param pulumi.Input[str] proxy_mode: The proxy mode. This must be one of [`http`/`https`/`tcp`].
        :param pulumi.Input[int] port: The number of listening port.
        :param pulumi.Input[bool] redirect_to_https: The flag to enable redirection from http to https. This flag is used only when `proxy_mode` is `http`.
        :param pulumi.Input[Sequence[pulumi.Input['ProxyLBBindPortResponseHeaderArgs']]] response_headers: One or more `response_header` blocks as defined below.
        :param pulumi.Input[str] ssl_policy: The ssl policy. This must be one of [`TLS-1-2-2019-04`/`TLS-1-2-2021-06`/`TLS-1-3-2021-06`].
        :param pulumi.Input[bool] support_http2: The flag to enable HTTP/2. This flag is used only when `proxy_mode` is `https`.
        """
        # proxy_mode is required; the rest are stored only when supplied.
        pulumi.set(__self__, "proxy_mode", proxy_mode)
        for prop, supplied in (
                ("port", port),
                ("redirect_to_https", redirect_to_https),
                ("response_headers", response_headers),
                ("ssl_policy", ssl_policy),
                ("support_http2", support_http2)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter(name="proxyMode")
    def proxy_mode(self) -> pulumi.Input[str]:
        """The proxy mode. This must be one of [`http`/`https`/`tcp`]."""
        return pulumi.get(self, "proxy_mode")

    @proxy_mode.setter
    def proxy_mode(self, v: pulumi.Input[str]):
        pulumi.set(self, "proxy_mode", v)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """The number of listening port."""
        return pulumi.get(self, "port")

    @port.setter
    def port(self, v: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", v)

    @property
    @pulumi.getter(name="redirectToHttps")
    def redirect_to_https(self) -> Optional[pulumi.Input[bool]]:
        """The flag to enable redirection from http to https. This flag is used only when `proxy_mode` is `http`."""
        return pulumi.get(self, "redirect_to_https")

    @redirect_to_https.setter
    def redirect_to_https(self, v: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "redirect_to_https", v)

    @property
    @pulumi.getter(name="responseHeaders")
    def response_headers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ProxyLBBindPortResponseHeaderArgs']]]]:
        """One or more `response_header` blocks as defined below."""
        return pulumi.get(self, "response_headers")

    @response_headers.setter
    def response_headers(self, v: Optional[pulumi.Input[Sequence[pulumi.Input['ProxyLBBindPortResponseHeaderArgs']]]]):
        pulumi.set(self, "response_headers", v)

    @property
    @pulumi.getter(name="sslPolicy")
    def ssl_policy(self) -> Optional[pulumi.Input[str]]:
        """The ssl policy. This must be one of [`TLS-1-2-2019-04`/`TLS-1-2-2021-06`/`TLS-1-3-2021-06`]."""
        return pulumi.get(self, "ssl_policy")

    @ssl_policy.setter
    def ssl_policy(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ssl_policy", v)

    @property
    @pulumi.getter(name="supportHttp2")
    def support_http2(self) -> Optional[pulumi.Input[bool]]:
        """The flag to enable HTTP/2. This flag is used only when `proxy_mode` is `https`."""
        return pulumi.get(self, "support_http2")

    @support_http2.setter
    def support_http2(self, v: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "support_http2", v)
@pulumi.input_type
class ProxyLBBindPortResponseHeaderArgs:
    def __init__(__self__, *,
                 header: pulumi.Input[str],
                 value: pulumi.Input[str]):
        """
        An HTTP response header that the ProxyLB appends to responses.

        :param pulumi.Input[str] header: The field name of HTTP header added to response by the ProxyLB.
        :param pulumi.Input[str] value: The field value of HTTP header added to response by the ProxyLB.
        """
        # Both fields are required.
        for prop, supplied in (("header", header), ("value", value)):
            pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter
    def header(self) -> pulumi.Input[str]:
        """The field name of HTTP header added to response by the ProxyLB."""
        return pulumi.get(self, "header")

    @header.setter
    def header(self, v: pulumi.Input[str]):
        pulumi.set(self, "header", v)

    @property
    @pulumi.getter
    def value(self) -> pulumi.Input[str]:
        """The field value of HTTP header added to response by the ProxyLB."""
        return pulumi.get(self, "value")

    @value.setter
    def value(self, v: pulumi.Input[str]):
        pulumi.set(self, "value", v)
@pulumi.input_type
class ProxyLBCertificateArgs:
    def __init__(__self__, *,
                 additional_certificates: Optional[pulumi.Input[Sequence[pulumi.Input['ProxyLBCertificateAdditionalCertificateArgs']]]] = None,
                 common_name: Optional[pulumi.Input[str]] = None,
                 intermediate_cert: Optional[pulumi.Input[str]] = None,
                 private_key: Optional[pulumi.Input[str]] = None,
                 server_cert: Optional[pulumi.Input[str]] = None,
                 subject_alt_names: Optional[pulumi.Input[str]] = None):
        """
        Manually managed certificate settings for a ProxyLB.

        :param pulumi.Input[Sequence[pulumi.Input['ProxyLBCertificateAdditionalCertificateArgs']]] additional_certificates: One or more `additional_certificate` blocks as defined below.
        :param pulumi.Input[str] common_name: The common name of the certificate.
        :param pulumi.Input[str] intermediate_cert: The intermediate certificate for a server.
        :param pulumi.Input[str] private_key: The private key for a server.
        :param pulumi.Input[str] server_cert: The certificate for a server.
        :param pulumi.Input[str] subject_alt_names: The subject alternative names of the certificate.
        """
        # Store only the arguments the caller actually supplied.
        for prop, supplied in (
                ("additional_certificates", additional_certificates),
                ("common_name", common_name),
                ("intermediate_cert", intermediate_cert),
                ("private_key", private_key),
                ("server_cert", server_cert),
                ("subject_alt_names", subject_alt_names)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter(name="additionalCertificates")
    def additional_certificates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ProxyLBCertificateAdditionalCertificateArgs']]]]:
        """One or more `additional_certificate` blocks as defined below."""
        return pulumi.get(self, "additional_certificates")

    @additional_certificates.setter
    def additional_certificates(self, v: Optional[pulumi.Input[Sequence[pulumi.Input['ProxyLBCertificateAdditionalCertificateArgs']]]]):
        pulumi.set(self, "additional_certificates", v)

    @property
    @pulumi.getter(name="commonName")
    def common_name(self) -> Optional[pulumi.Input[str]]:
        """The common name of the certificate."""
        return pulumi.get(self, "common_name")

    @common_name.setter
    def common_name(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "common_name", v)

    @property
    @pulumi.getter(name="intermediateCert")
    def intermediate_cert(self) -> Optional[pulumi.Input[str]]:
        """The intermediate certificate for a server."""
        return pulumi.get(self, "intermediate_cert")

    @intermediate_cert.setter
    def intermediate_cert(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "intermediate_cert", v)

    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> Optional[pulumi.Input[str]]:
        """The private key for a server."""
        return pulumi.get(self, "private_key")

    @private_key.setter
    def private_key(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "private_key", v)

    @property
    @pulumi.getter(name="serverCert")
    def server_cert(self) -> Optional[pulumi.Input[str]]:
        """The certificate for a server."""
        return pulumi.get(self, "server_cert")

    @server_cert.setter
    def server_cert(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "server_cert", v)

    @property
    @pulumi.getter(name="subjectAltNames")
    def subject_alt_names(self) -> Optional[pulumi.Input[str]]:
        """The subject alternative names of the certificate."""
        return pulumi.get(self, "subject_alt_names")

    @subject_alt_names.setter
    def subject_alt_names(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subject_alt_names", v)
@pulumi.input_type
class ProxyLBCertificateAdditionalCertificateArgs:
    def __init__(__self__, *,
                 private_key: pulumi.Input[str],
                 server_cert: pulumi.Input[str],
                 intermediate_cert: Optional[pulumi.Input[str]] = None):
        """
        One additional certificate attached to a manually managed ProxyLB certificate.

        :param pulumi.Input[str] private_key: The private key for a server.
        :param pulumi.Input[str] server_cert: The certificate for a server.
        :param pulumi.Input[str] intermediate_cert: The intermediate certificate for a server.
        """
        # Key and certificate are required; the intermediate is optional.
        pulumi.set(__self__, "private_key", private_key)
        pulumi.set(__self__, "server_cert", server_cert)
        if intermediate_cert is not None:
            pulumi.set(__self__, "intermediate_cert", intermediate_cert)

    @property
    @pulumi.getter(name="privateKey")
    def private_key(self) -> pulumi.Input[str]:
        """The private key for a server."""
        return pulumi.get(self, "private_key")

    @private_key.setter
    def private_key(self, v: pulumi.Input[str]):
        pulumi.set(self, "private_key", v)

    @property
    @pulumi.getter(name="serverCert")
    def server_cert(self) -> pulumi.Input[str]:
        """The certificate for a server."""
        return pulumi.get(self, "server_cert")

    @server_cert.setter
    def server_cert(self, v: pulumi.Input[str]):
        pulumi.set(self, "server_cert", v)

    @property
    @pulumi.getter(name="intermediateCert")
    def intermediate_cert(self) -> Optional[pulumi.Input[str]]:
        """The intermediate certificate for a server."""
        return pulumi.get(self, "intermediate_cert")

    @intermediate_cert.setter
    def intermediate_cert(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "intermediate_cert", v)
@pulumi.input_type
class ProxyLBHealthCheckArgs:
    def __init__(__self__, *,
                 protocol: pulumi.Input[str],
                 delay_loop: Optional[pulumi.Input[int]] = None,
                 host_header: Optional[pulumi.Input[str]] = None,
                 path: Optional[pulumi.Input[str]] = None,
                 port: Optional[pulumi.Input[int]] = None):
        """
        Health-check settings for a ProxyLB.

        :param pulumi.Input[str] protocol: The protocol used for health checks. This must be one of [`http`/`tcp`].
        :param pulumi.Input[int] delay_loop: The interval in seconds between checks. This must be in the range [`10`-`60`].
        :param pulumi.Input[str] host_header: The value of host header send when checking by HTTP.
        :param pulumi.Input[str] path: The path used when checking by HTTP.
        :param pulumi.Input[int] port: The port number used when checking by TCP.
        """
        # protocol is required; the rest are stored only when supplied.
        pulumi.set(__self__, "protocol", protocol)
        for prop, supplied in (
                ("delay_loop", delay_loop),
                ("host_header", host_header),
                ("path", path),
                ("port", port)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter
    def protocol(self) -> pulumi.Input[str]:
        """The protocol used for health checks. This must be one of [`http`/`tcp`]."""
        return pulumi.get(self, "protocol")

    @protocol.setter
    def protocol(self, v: pulumi.Input[str]):
        pulumi.set(self, "protocol", v)

    @property
    @pulumi.getter(name="delayLoop")
    def delay_loop(self) -> Optional[pulumi.Input[int]]:
        """The interval in seconds between checks. This must be in the range [`10`-`60`]."""
        return pulumi.get(self, "delay_loop")

    @delay_loop.setter
    def delay_loop(self, v: Optional[pulumi.Input[int]]):
        pulumi.set(self, "delay_loop", v)

    @property
    @pulumi.getter(name="hostHeader")
    def host_header(self) -> Optional[pulumi.Input[str]]:
        """The value of host header send when checking by HTTP."""
        return pulumi.get(self, "host_header")

    @host_header.setter
    def host_header(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host_header", v)

    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        """The path used when checking by HTTP."""
        return pulumi.get(self, "path")

    @path.setter
    def path(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", v)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """The port number used when checking by TCP."""
        return pulumi.get(self, "port")

    @port.setter
    def port(self, v: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", v)
@pulumi.input_type
class ProxyLBRuleArgs:
    def __init__(__self__, *,
                 action: Optional[pulumi.Input[str]] = None,
                 fixed_content_type: Optional[pulumi.Input[str]] = None,
                 fixed_message_body: Optional[pulumi.Input[str]] = None,
                 fixed_status_code: Optional[pulumi.Input[str]] = None,
                 group: Optional[pulumi.Input[str]] = None,
                 host: Optional[pulumi.Input[str]] = None,
                 path: Optional[pulumi.Input[str]] = None,
                 redirect_location: Optional[pulumi.Input[str]] = None,
                 redirect_status_code: Optional[pulumi.Input[str]] = None):
        """
        A rule-based load balancing rule for a ProxyLB.

        :param pulumi.Input[str] action: The type of action to be performed when requests matches the rule. This must be one of [`forward`/`redirect`/`fixed`] Default: `forward`.
        :param pulumi.Input[str] fixed_content_type: Content-Type header value for fixed response sent when requests matches the rule. This must be one of [`text/plain`/`text/html`/`application/javascript`/`application/json`].
        :param pulumi.Input[str] fixed_message_body: Content body for fixed response sent when requests matches the rule.
        :param pulumi.Input[str] fixed_status_code: HTTP status code for fixed response sent when requests matches the rule. This must be one of [`200`/`403`/`503`].
        :param pulumi.Input[str] group: The name of load balancing group. When proxyLB received request which matched to `host` and `path`, proxyLB forwards the request to servers that having same group name. The length of this value must be in the range [`1`-`10`].
        :param pulumi.Input[str] host: The value of HTTP host header that is used as condition of rule-based balancing.
        :param pulumi.Input[str] path: The request path that is used as condition of rule-based balancing.
        :param pulumi.Input[str] redirect_location: The URL to redirect to when the request matches the rule. see https://manual.sakura.ad.jp/cloud/appliance/enhanced-lb/#enhanced-lb-rule for details.
        :param pulumi.Input[str] redirect_status_code: HTTP status code for redirects sent when requests matches the rule. This must be one of [`301`/`302`].
        """
        # All fields are optional; store only what the caller supplied.
        for prop, supplied in (
                ("action", action),
                ("fixed_content_type", fixed_content_type),
                ("fixed_message_body", fixed_message_body),
                ("fixed_status_code", fixed_status_code),
                ("group", group),
                ("host", host),
                ("path", path),
                ("redirect_location", redirect_location),
                ("redirect_status_code", redirect_status_code)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter
    def action(self) -> Optional[pulumi.Input[str]]:
        """The type of action to be performed when requests matches the rule. This must be one of [`forward`/`redirect`/`fixed`] Default: `forward`."""
        return pulumi.get(self, "action")

    @action.setter
    def action(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "action", v)

    @property
    @pulumi.getter(name="fixedContentType")
    def fixed_content_type(self) -> Optional[pulumi.Input[str]]:
        """Content-Type header value for fixed response sent when requests matches the rule. This must be one of [`text/plain`/`text/html`/`application/javascript`/`application/json`]."""
        return pulumi.get(self, "fixed_content_type")

    @fixed_content_type.setter
    def fixed_content_type(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "fixed_content_type", v)

    @property
    @pulumi.getter(name="fixedMessageBody")
    def fixed_message_body(self) -> Optional[pulumi.Input[str]]:
        """Content body for fixed response sent when requests matches the rule."""
        return pulumi.get(self, "fixed_message_body")

    @fixed_message_body.setter
    def fixed_message_body(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "fixed_message_body", v)

    @property
    @pulumi.getter(name="fixedStatusCode")
    def fixed_status_code(self) -> Optional[pulumi.Input[str]]:
        """HTTP status code for fixed response sent when requests matches the rule. This must be one of [`200`/`403`/`503`]."""
        return pulumi.get(self, "fixed_status_code")

    @fixed_status_code.setter
    def fixed_status_code(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "fixed_status_code", v)

    @property
    @pulumi.getter
    def group(self) -> Optional[pulumi.Input[str]]:
        """The name of load balancing group. When proxyLB received request which matched to `host` and `path`, proxyLB forwards the request to servers that having same group name. The length of this value must be in the range [`1`-`10`]."""
        return pulumi.get(self, "group")

    @group.setter
    def group(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "group", v)

    @property
    @pulumi.getter
    def host(self) -> Optional[pulumi.Input[str]]:
        """The value of HTTP host header that is used as condition of rule-based balancing."""
        return pulumi.get(self, "host")

    @host.setter
    def host(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host", v)

    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        """The request path that is used as condition of rule-based balancing."""
        return pulumi.get(self, "path")

    @path.setter
    def path(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", v)

    @property
    @pulumi.getter(name="redirectLocation")
    def redirect_location(self) -> Optional[pulumi.Input[str]]:
        """The URL to redirect to when the request matches the rule. see https://manual.sakura.ad.jp/cloud/appliance/enhanced-lb/#enhanced-lb-rule for details."""
        return pulumi.get(self, "redirect_location")

    @redirect_location.setter
    def redirect_location(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "redirect_location", v)

    @property
    @pulumi.getter(name="redirectStatusCode")
    def redirect_status_code(self) -> Optional[pulumi.Input[str]]:
        """HTTP status code for redirects sent when requests matches the rule. This must be one of [`301`/`302`]."""
        return pulumi.get(self, "redirect_status_code")

    @redirect_status_code.setter
    def redirect_status_code(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "redirect_status_code", v)
@pulumi.input_type
class ProxyLBServerArgs:
    def __init__(__self__, *,
                 ip_address: pulumi.Input[str],
                 port: pulumi.Input[int],
                 enabled: Optional[pulumi.Input[bool]] = None,
                 group: Optional[pulumi.Input[str]] = None):
        """
        A destination (real) server behind a ProxyLB.

        :param pulumi.Input[str] ip_address: The IP address of the destination server.
        :param pulumi.Input[int] port: The port number of the destination server. This must be in the range [`1`-`65535`].
        :param pulumi.Input[bool] enabled: The flag to enable as destination of load balancing.
        :param pulumi.Input[str] group: The name of load balancing group. This is used when using rule-based load balancing. The length of this value must be in the range [`1`-`10`].
        """
        # Address and port are required; the rest are stored only when supplied.
        pulumi.set(__self__, "ip_address", ip_address)
        pulumi.set(__self__, "port", port)
        for prop, supplied in (("enabled", enabled), ("group", group)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Input[str]:
        """The IP address of the destination server."""
        return pulumi.get(self, "ip_address")

    @ip_address.setter
    def ip_address(self, v: pulumi.Input[str]):
        pulumi.set(self, "ip_address", v)

    @property
    @pulumi.getter
    def port(self) -> pulumi.Input[int]:
        """The port number of the destination server. This must be in the range [`1`-`65535`]."""
        return pulumi.get(self, "port")

    @port.setter
    def port(self, v: pulumi.Input[int]):
        pulumi.set(self, "port", v)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """The flag to enable as destination of load balancing."""
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, v: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", v)

    @property
    @pulumi.getter
    def group(self) -> Optional[pulumi.Input[str]]:
        """The name of load balancing group. This is used when using rule-based load balancing. The length of this value must be in the range [`1`-`10`]."""
        return pulumi.get(self, "group")

    @group.setter
    def group(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "group", v)
@pulumi.input_type
class ProxyLBSorryServerArgs:
    def __init__(__self__, *,
                 ip_address: pulumi.Input[str],
                 port: Optional[pulumi.Input[int]] = None):
        """
        The fallback ("sorry") server used when every destination server is down.

        :param pulumi.Input[str] ip_address: The IP address of the SorryServer. This will be used when all servers are down.
        :param pulumi.Input[int] port: The port number of the SorryServer. This will be used when all servers are down.
        """
        # Address is required; port is stored only when supplied.
        pulumi.set(__self__, "ip_address", ip_address)
        if port is not None:
            pulumi.set(__self__, "port", port)

    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Input[str]:
        """The IP address of the SorryServer. This will be used when all servers are down."""
        return pulumi.get(self, "ip_address")

    @ip_address.setter
    def ip_address(self, v: pulumi.Input[str]):
        pulumi.set(self, "ip_address", v)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """The port number of the SorryServer. This will be used when all servers are down."""
        return pulumi.get(self, "port")

    @port.setter
    def port(self, v: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", v)
@pulumi.input_type
class ProxyLBSyslogArgs:
    def __init__(__self__, *,
                 port: Optional[pulumi.Input[int]] = None,
                 server: Optional[pulumi.Input[str]] = None):
        """
        Syslog forwarding settings for a ProxyLB.

        :param pulumi.Input[int] port: The number of syslog port.
        :param pulumi.Input[str] server: The address of syslog server.
        """
        # Both fields are optional; store only what the caller supplied.
        for prop, supplied in (("port", port), ("server", server)):
            if supplied is not None:
                pulumi.set(__self__, prop, supplied)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """The number of syslog port."""
        return pulumi.get(self, "port")

    @port.setter
    def port(self, v: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", v)

    @property
    @pulumi.getter
    def server(self) -> Optional[pulumi.Input[str]]:
        """The address of syslog server."""
        return pulumi.get(self, "server")

    @server.setter
    def server(self, v: Optional[pulumi.Input[str]]):
        pulumi.set(self, "server", v)
@pulumi.input_type
class ServerDiskEditParameterArgs:
def __init__(__self__, *,
change_partition_uuid: Optional[pulumi.Input[bool]] = None,
disable_pw_auth: Optional[pulumi.Input[bool]] = None,
enable_dhcp: Optional[pulumi.Input[bool]] = None,
gateway: Optional[pulumi.Input[str]] = None,
hostname: Optional[pulumi.Input[str]] = None,
ip_address: Optional[pulumi.Input[str]] = None,
netmask: Optional[pulumi.Input[int]] = None,
note_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
notes: Optional[pulumi.Input[Sequence[pulumi.Input['ServerDiskEditParameterNoteArgs']]]] = None,
password: Optional[pulumi.Input[str]] = None,
ssh_key_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
ssh_keys: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
:param pulumi.Input[bool] change_partition_uuid: The flag to change partition uuid.
:param pulumi.Input[bool] disable_pw_auth: The flag to disable password authentication.
:param pulumi.Input[bool] enable_dhcp: The flag to enable DHCP client.
:param pulumi.Input[str] gateway: The gateway address used by the Server.
:param pulumi.Input[str] hostname: The hostname of the Server. The length of this value must be in the range [`1`-`64`].
:param pulumi.Input[str] ip_address: The IP address to assign to the Server.
:param pulumi.Input[int] netmask: The bit length of the subnet to assign to the Server.
:param pulumi.Input[Sequence[pulumi.Input[str]]] note_ids: A list of the Note id.
Note: **The `note_ids` will be removed in a future version. Please use the `note` instead**
:param pulumi.Input[Sequence[pulumi.Input['ServerDiskEditParameterNoteArgs']]] notes: A list of the `note` block as defined below.
:param pulumi.Input[str] password: The password of default user. The length of this value must be in the range [`8`-`64`].
:param pulumi.Input[Sequence[pulumi.Input[str]]] ssh_key_ids: A list of the SSHKey id.
:param pulumi.Input[Sequence[pulumi.Input[str]]] ssh_keys: A list of the SSHKey text.
"""
if change_partition_uuid is not None:
pulumi.set(__self__, "change_partition_uuid", change_partition_uuid)
if disable_pw_auth is not None:
pulumi.set(__self__, "disable_pw_auth", disable_pw_auth)
if enable_dhcp is not None:
pulumi.set(__self__, "enable_dhcp", enable_dhcp)
if gateway is not None:
pulumi.set(__self__, "gateway", gateway)
if hostname is not None:
pulumi.set(__self__, "hostname", hostname)
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
if netmask is not None:
pulumi.set(__self__, "netmask", netmask)
if note_ids is not None:
warnings.warn("""The note_ids field will be removed in a future version. Please use the note field instead""", DeprecationWarning)
pulumi.log.warn("""note_ids is deprecated: The note_ids field will be removed in a future version. Please use the note field instead""")
if note_ids is not None:
pulumi.set(__self__, "note_ids", note_ids)
if notes is not None:
pulumi.set(__self__, "notes", notes)
if password is not None:
pulumi.set(__self__, "password", password)
if ssh_key_ids is not None:
pulumi.set(__self__, "ssh_key_ids", ssh_key_ids)
if ssh_keys is not None:
pulumi.set(__self__, "ssh_keys", ssh_keys)
@property
@pulumi.getter(name="changePartitionUuid")
def change_partition_uuid(self) -> Optional[pulumi.Input[bool]]:
"""
The flag to change partition uuid.
"""
return pulumi.get(self, "change_partition_uuid")
@change_partition_uuid.setter
def change_partition_uuid(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "change_partition_uuid", value)
@property
@pulumi.getter(name="disablePwAuth")
def disable_pw_auth(self) -> Optional[pulumi.Input[bool]]:
"""
The flag to disable password authentication.
"""
return pulumi.get(self, "disable_pw_auth")
@disable_pw_auth.setter
def disable_pw_auth(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "disable_pw_auth", value)
@property
@pulumi.getter(name="enableDhcp")
def enable_dhcp(self) -> Optional[pulumi.Input[bool]]:
"""
The flag to enable DHCP client.
"""
return pulumi.get(self, "enable_dhcp")
@enable_dhcp.setter
def enable_dhcp(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_dhcp", value)
@property
@pulumi.getter
def gateway(self) -> Optional[pulumi.Input[str]]:
"""
The gateway address used by the Server.
"""
return pulumi.get(self, "gateway")
@gateway.setter
def gateway(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "gateway", value)
@property
@pulumi.getter
def hostname(self) -> Optional[pulumi.Input[str]]:
"""
The hostname of the Server. The length of this value must be in the range [`1`-`64`].
"""
return pulumi.get(self, "hostname")
@hostname.setter
def hostname(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "hostname", value)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[pulumi.Input[str]]:
"""
The IP address to assign to the Server.
"""
return pulumi.get(self, "ip_address")
@ip_address.setter
def ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ip_address", value)
@property
@pulumi.getter
def netmask(self) -> Optional[pulumi.Input[int]]:
"""
The bit length of the subnet to assign to the Server.
"""
return pulumi.get(self, "netmask")
@netmask.setter
def netmask(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "netmask", value)
@property
@pulumi.getter(name="noteIds")
def note_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of the Note id.
Note: **The `note_ids` will be removed in a future version. Please use the `note` instead**
"""
return pulumi.get(self, "note_ids")
@note_ids.setter
def note_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "note_ids", value)
@property
@pulumi.getter
def notes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServerDiskEditParameterNoteArgs']]]]:
    """
    A list of the `note` block as defined below.
    """
    return pulumi.get(self, "notes")
@notes.setter
def notes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ServerDiskEditParameterNoteArgs']]]]):
    """Set the list of `note` blocks."""
    pulumi.set(self, "notes", value)
@property
@pulumi.getter
def password(self) -> Optional[pulumi.Input[str]]:
    """
    The password of default user. The length of this value must be in the range [`8`-`64`].
    """
    return pulumi.get(self, "password")
@password.setter
def password(self, value: Optional[pulumi.Input[str]]):
    """Set the password of the default user."""
    pulumi.set(self, "password", value)
@property
@pulumi.getter(name="sshKeyIds")
def ssh_key_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """
    A list of the SSHKey id.
    """
    return pulumi.get(self, "ssh_key_ids")
@ssh_key_ids.setter
def ssh_key_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
    """Set the list of the SSHKey id."""
    pulumi.set(self, "ssh_key_ids", value)
@property
@pulumi.getter(name="sshKeys")
def ssh_keys(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """
    A list of the SSHKey text.
    """
    return pulumi.get(self, "ssh_keys")
@ssh_keys.setter
def ssh_keys(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
    """Set the list of the SSHKey text."""
    pulumi.set(self, "ssh_keys", value)
@pulumi.input_type
class ServerDiskEditParameterNoteArgs:
    def __init__(__self__, *,
                 id: pulumi.Input[str],
                 api_key_id: Optional[pulumi.Input[str]] = None,
                 variables: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] id: The id of the Note/StartupScript.
        :param pulumi.Input[str] api_key_id: The id of the API key to be injected into the Note/StartupScript when editing the disk.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] variables: The value of the variable that be injected into the Note/StartupScript when editing the disk.
        """
        pulumi.set(__self__, "id", id)
        # Record optional attributes only when the caller actually supplied them.
        for attr_name, attr_value in (("api_key_id", api_key_id), ("variables", variables)):
            if attr_value is not None:
                pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter
    def id(self) -> pulumi.Input[str]:
        """The id of the Note/StartupScript."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: pulumi.Input[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter(name="apiKeyId")
    def api_key_id(self) -> Optional[pulumi.Input[str]]:
        """The id of the API key to be injected into the Note/StartupScript when editing the disk."""
        return pulumi.get(self, "api_key_id")

    @api_key_id.setter
    def api_key_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_key_id", value)

    @property
    @pulumi.getter
    def variables(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """The value of the variable that be injected into the Note/StartupScript when editing the disk."""
        return pulumi.get(self, "variables")

    @variables.setter
    def variables(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "variables", value)
@pulumi.input_type
class ServerNetworkInterfaceArgs:
    def __init__(__self__, *,
                 upstream: pulumi.Input[str],
                 mac_address: Optional[pulumi.Input[str]] = None,
                 packet_filter_id: Optional[pulumi.Input[str]] = None,
                 user_ip_address: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] upstream: The upstream type or upstream switch id. This must be one of [`shared`/`disconnect`/`<switch id>`].
        :param pulumi.Input[str] mac_address: The MAC address.
        :param pulumi.Input[str] packet_filter_id: The id of the packet filter to attach to the network interface.
        :param pulumi.Input[str] user_ip_address: The IP address for only display. This value doesn't affect actual NIC settings.
        """
        pulumi.set(__self__, "upstream", upstream)
        # Only persist optional attributes that were explicitly provided.
        optional_attrs = (
            ("mac_address", mac_address),
            ("packet_filter_id", packet_filter_id),
            ("user_ip_address", user_ip_address),
        )
        for attr_name, attr_value in optional_attrs:
            if attr_value is not None:
                pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter
    def upstream(self) -> pulumi.Input[str]:
        """The upstream type or upstream switch id. This must be one of [`shared`/`disconnect`/`<switch id>`]."""
        return pulumi.get(self, "upstream")

    @upstream.setter
    def upstream(self, value: pulumi.Input[str]):
        pulumi.set(self, "upstream", value)

    @property
    @pulumi.getter(name="macAddress")
    def mac_address(self) -> Optional[pulumi.Input[str]]:
        """The MAC address."""
        return pulumi.get(self, "mac_address")

    @mac_address.setter
    def mac_address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "mac_address", value)

    @property
    @pulumi.getter(name="packetFilterId")
    def packet_filter_id(self) -> Optional[pulumi.Input[str]]:
        """The id of the packet filter to attach to the network interface."""
        return pulumi.get(self, "packet_filter_id")

    @packet_filter_id.setter
    def packet_filter_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "packet_filter_id", value)

    @property
    @pulumi.getter(name="userIpAddress")
    def user_ip_address(self) -> Optional[pulumi.Input[str]]:
        """The IP address for only display. This value doesn't affect actual NIC settings."""
        return pulumi.get(self, "user_ip_address")

    @user_ip_address.setter
    def user_ip_address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_ip_address", value)
@pulumi.input_type
class SimpleMonitorHealthCheckArgs:
    # Health-check configuration for a Simple Monitor. Only `protocol` is required;
    # the remaining fields apply to specific protocols (SNMP, HTTP/HTTPS, DNS, FTP, ...).
    def __init__(__self__, *,
                 protocol: pulumi.Input[str],
                 community: Optional[pulumi.Input[str]] = None,
                 contains_string: Optional[pulumi.Input[str]] = None,
                 excepcted_data: Optional[pulumi.Input[str]] = None,
                 ftps: Optional[pulumi.Input[str]] = None,
                 host_header: Optional[pulumi.Input[str]] = None,
                 http2: Optional[pulumi.Input[bool]] = None,
                 oid: Optional[pulumi.Input[str]] = None,
                 password: Optional[pulumi.Input[str]] = None,
                 path: Optional[pulumi.Input[str]] = None,
                 port: Optional[pulumi.Input[int]] = None,
                 qname: Optional[pulumi.Input[str]] = None,
                 remaining_days: Optional[pulumi.Input[int]] = None,
                 sni: Optional[pulumi.Input[bool]] = None,
                 snmp_version: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[int]] = None,
                 username: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] protocol: The protocol used for health checks. This must be one of [`http`/`https`/`ping`/`tcp`/`dns`/`ssh`/`smtp`/`pop3`/`snmp`/`sslcertificate`/`ftp`].
        :param pulumi.Input[str] community: The SNMP community string used when checking by SNMP.
        :param pulumi.Input[str] contains_string: The string that should be included in the response body when checking for HTTP/HTTPS.
        :param pulumi.Input[str] excepcted_data: The expected value used when checking by DNS.
        :param pulumi.Input[str] ftps: The methods of invoking security for monitoring with FTPS. This must be one of [``/`implicit`/`explicit`].
        :param pulumi.Input[str] host_header: The value of the host header sent when checking by HTTP/HTTPS.
        :param pulumi.Input[bool] http2: The flag to enable HTTP/2 when checking by HTTPS.
        :param pulumi.Input[str] oid: The SNMP OID used when checking by SNMP.
        :param pulumi.Input[str] password: The password for basic auth used when checking by HTTP/HTTPS.
        :param pulumi.Input[str] path: The path used when checking by HTTP/HTTPS.
        :param pulumi.Input[int] port: The target port number.
        :param pulumi.Input[str] qname: The FQDN used when checking by DNS.
        :param pulumi.Input[int] remaining_days: The number of remaining days until certificate expiration used when checking SSL certificates. This must be in the range [`1`-`9999`].
        :param pulumi.Input[bool] sni: The flag to enable SNI when checking by HTTP/HTTPS.
        :param pulumi.Input[str] snmp_version: The SNMP version used when checking by SNMP. This must be one of `1`/`2c`.
        :param pulumi.Input[int] status: The response-code to expect when checking by HTTP/HTTPS.
        :param pulumi.Input[str] username: The user name for basic auth used when checking by HTTP/HTTPS.
        """
        # NOTE(review): `excepcted_data` is a typo for "expected_data", but it is part of the
        # generated public interface (and the upstream provider schema) — keep as-is for compatibility.
        pulumi.set(__self__, "protocol", protocol)
        # Optional attributes are only recorded when explicitly supplied by the caller.
        if community is not None:
            pulumi.set(__self__, "community", community)
        if contains_string is not None:
            pulumi.set(__self__, "contains_string", contains_string)
        if excepcted_data is not None:
            pulumi.set(__self__, "excepcted_data", excepcted_data)
        if ftps is not None:
            pulumi.set(__self__, "ftps", ftps)
        if host_header is not None:
            pulumi.set(__self__, "host_header", host_header)
        if http2 is not None:
            pulumi.set(__self__, "http2", http2)
        if oid is not None:
            pulumi.set(__self__, "oid", oid)
        if password is not None:
            pulumi.set(__self__, "password", password)
        if path is not None:
            pulumi.set(__self__, "path", path)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if qname is not None:
            pulumi.set(__self__, "qname", qname)
        if remaining_days is not None:
            pulumi.set(__self__, "remaining_days", remaining_days)
        if sni is not None:
            pulumi.set(__self__, "sni", sni)
        if snmp_version is not None:
            pulumi.set(__self__, "snmp_version", snmp_version)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if username is not None:
            pulumi.set(__self__, "username", username)

    @property
    @pulumi.getter
    def protocol(self) -> pulumi.Input[str]:
        """
        The protocol used for health checks. This must be one of [`http`/`https`/`ping`/`tcp`/`dns`/`ssh`/`smtp`/`pop3`/`snmp`/`sslcertificate`/`ftp`].
        """
        return pulumi.get(self, "protocol")

    @protocol.setter
    def protocol(self, value: pulumi.Input[str]):
        pulumi.set(self, "protocol", value)

    @property
    @pulumi.getter
    def community(self) -> Optional[pulumi.Input[str]]:
        """
        The SNMP community string used when checking by SNMP.
        """
        return pulumi.get(self, "community")

    @community.setter
    def community(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "community", value)

    @property
    @pulumi.getter(name="containsString")
    def contains_string(self) -> Optional[pulumi.Input[str]]:
        """
        The string that should be included in the response body when checking for HTTP/HTTPS.
        """
        return pulumi.get(self, "contains_string")

    @contains_string.setter
    def contains_string(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "contains_string", value)

    @property
    @pulumi.getter(name="excepctedData")
    def excepcted_data(self) -> Optional[pulumi.Input[str]]:
        """
        The expected value used when checking by DNS.
        """
        return pulumi.get(self, "excepcted_data")

    @excepcted_data.setter
    def excepcted_data(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "excepcted_data", value)

    @property
    @pulumi.getter
    def ftps(self) -> Optional[pulumi.Input[str]]:
        """
        The methods of invoking security for monitoring with FTPS. This must be one of [``/`implicit`/`explicit`].
        """
        return pulumi.get(self, "ftps")

    @ftps.setter
    def ftps(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "ftps", value)

    @property
    @pulumi.getter(name="hostHeader")
    def host_header(self) -> Optional[pulumi.Input[str]]:
        """
        The value of the host header sent when checking by HTTP/HTTPS.
        """
        return pulumi.get(self, "host_header")

    @host_header.setter
    def host_header(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host_header", value)

    @property
    @pulumi.getter
    def http2(self) -> Optional[pulumi.Input[bool]]:
        """
        The flag to enable HTTP/2 when checking by HTTPS.
        """
        return pulumi.get(self, "http2")

    @http2.setter
    def http2(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "http2", value)

    @property
    @pulumi.getter
    def oid(self) -> Optional[pulumi.Input[str]]:
        """
        The SNMP OID used when checking by SNMP.
        """
        return pulumi.get(self, "oid")

    @oid.setter
    def oid(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "oid", value)

    @property
    @pulumi.getter
    def password(self) -> Optional[pulumi.Input[str]]:
        """
        The password for basic auth used when checking by HTTP/HTTPS.
        """
        return pulumi.get(self, "password")

    @password.setter
    def password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "password", value)

    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        """
        The path used when checking by HTTP/HTTPS.
        """
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """
        The target port number.
        """
        return pulumi.get(self, "port")

    @port.setter
    def port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", value)

    @property
    @pulumi.getter
    def qname(self) -> Optional[pulumi.Input[str]]:
        """
        The FQDN used when checking by DNS.
        """
        return pulumi.get(self, "qname")

    @qname.setter
    def qname(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "qname", value)

    @property
    @pulumi.getter(name="remainingDays")
    def remaining_days(self) -> Optional[pulumi.Input[int]]:
        """
        The number of remaining days until certificate expiration used when checking SSL certificates. This must be in the range [`1`-`9999`].
        """
        return pulumi.get(self, "remaining_days")

    @remaining_days.setter
    def remaining_days(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "remaining_days", value)

    @property
    @pulumi.getter
    def sni(self) -> Optional[pulumi.Input[bool]]:
        """
        The flag to enable SNI when checking by HTTP/HTTPS.
        """
        return pulumi.get(self, "sni")

    @sni.setter
    def sni(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "sni", value)

    @property
    @pulumi.getter(name="snmpVersion")
    def snmp_version(self) -> Optional[pulumi.Input[str]]:
        """
        The SNMP version used when checking by SNMP. This must be one of `1`/`2c`.
        """
        return pulumi.get(self, "snmp_version")

    @snmp_version.setter
    def snmp_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "snmp_version", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[int]]:
        """
        The response-code to expect when checking by HTTP/HTTPS.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def username(self) -> Optional[pulumi.Input[str]]:
        """
        The user name for basic auth used when checking by HTTP/HTTPS.
        """
        return pulumi.get(self, "username")

    @username.setter
    def username(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "username", value)
@pulumi.input_type
class VPCRouterDhcpServerArgs:
    def __init__(__self__, *,
                 interface_index: pulumi.Input[int],
                 range_start: pulumi.Input[str],
                 range_stop: pulumi.Input[str],
                 dns_servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[int] interface_index: The index of the network interface on which to enable the DHCP service. This must be in the range [`1`-`7`].
        :param pulumi.Input[str] range_start: The start value of IP address range to assign to DHCP client.
        :param pulumi.Input[str] range_stop: The end value of IP address range to assign to DHCP client.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] dns_servers: A list of IP address of DNS server to assign to DHCP client.
        """
        # Required attributes are always stored.
        for attr_name, attr_value in (
            ("interface_index", interface_index),
            ("range_start", range_start),
            ("range_stop", range_stop),
        ):
            pulumi.set(__self__, attr_name, attr_value)
        # The DNS server list is optional.
        if dns_servers is not None:
            pulumi.set(__self__, "dns_servers", dns_servers)

    @property
    @pulumi.getter(name="interfaceIndex")
    def interface_index(self) -> pulumi.Input[int]:
        """The index of the network interface on which to enable the DHCP service. This must be in the range [`1`-`7`]."""
        return pulumi.get(self, "interface_index")

    @interface_index.setter
    def interface_index(self, value: pulumi.Input[int]):
        pulumi.set(self, "interface_index", value)

    @property
    @pulumi.getter(name="rangeStart")
    def range_start(self) -> pulumi.Input[str]:
        """The start value of IP address range to assign to DHCP client."""
        return pulumi.get(self, "range_start")

    @range_start.setter
    def range_start(self, value: pulumi.Input[str]):
        pulumi.set(self, "range_start", value)

    @property
    @pulumi.getter(name="rangeStop")
    def range_stop(self) -> pulumi.Input[str]:
        """The end value of IP address range to assign to DHCP client."""
        return pulumi.get(self, "range_stop")

    @range_stop.setter
    def range_stop(self, value: pulumi.Input[str]):
        pulumi.set(self, "range_stop", value)

    @property
    @pulumi.getter(name="dnsServers")
    def dns_servers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """A list of IP address of DNS server to assign to DHCP client."""
        return pulumi.get(self, "dns_servers")

    @dns_servers.setter
    def dns_servers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "dns_servers", value)
@pulumi.input_type
class VPCRouterDhcpStaticMappingArgs:
    def __init__(__self__, *,
                 ip_address: pulumi.Input[str],
                 mac_address: pulumi.Input[str]):
        """
        :param pulumi.Input[str] ip_address: The static IP address to assign to DHCP client.
        :param pulumi.Input[str] mac_address: The source MAC address of static mapping.
        """
        # Both attributes are required; store them unconditionally.
        for attr_name, attr_value in (("ip_address", ip_address), ("mac_address", mac_address)):
            pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Input[str]:
        """The static IP address to assign to DHCP client."""
        return pulumi.get(self, "ip_address")

    @ip_address.setter
    def ip_address(self, value: pulumi.Input[str]):
        pulumi.set(self, "ip_address", value)

    @property
    @pulumi.getter(name="macAddress")
    def mac_address(self) -> pulumi.Input[str]:
        """The source MAC address of static mapping."""
        return pulumi.get(self, "mac_address")

    @mac_address.setter
    def mac_address(self, value: pulumi.Input[str]):
        pulumi.set(self, "mac_address", value)
@pulumi.input_type
class VPCRouterFirewallArgs:
    def __init__(__self__, *,
                 direction: pulumi.Input[str],
                 expressions: pulumi.Input[Sequence[pulumi.Input['VPCRouterFirewallExpressionArgs']]],
                 interface_index: Optional[pulumi.Input[int]] = None):
        """
        :param pulumi.Input[str] direction: The direction to apply the firewall. This must be one of [`send`/`receive`].
        :param pulumi.Input[Sequence[pulumi.Input['VPCRouterFirewallExpressionArgs']]] expressions: One or more `expression` blocks as defined below.
        :param pulumi.Input[int] interface_index: The index of the network interface on which to enable filtering. This must be in the range [`0`-`7`].
        """
        pulumi.set(__self__, "direction", direction)
        pulumi.set(__self__, "expressions", expressions)
        # The interface index is optional; omit it unless explicitly given.
        if interface_index is not None:
            pulumi.set(__self__, "interface_index", interface_index)

    @property
    @pulumi.getter
    def direction(self) -> pulumi.Input[str]:
        """The direction to apply the firewall. This must be one of [`send`/`receive`]."""
        return pulumi.get(self, "direction")

    @direction.setter
    def direction(self, value: pulumi.Input[str]):
        pulumi.set(self, "direction", value)

    @property
    @pulumi.getter
    def expressions(self) -> pulumi.Input[Sequence[pulumi.Input['VPCRouterFirewallExpressionArgs']]]:
        """One or more `expression` blocks as defined below."""
        return pulumi.get(self, "expressions")

    @expressions.setter
    def expressions(self, value: pulumi.Input[Sequence[pulumi.Input['VPCRouterFirewallExpressionArgs']]]):
        pulumi.set(self, "expressions", value)

    @property
    @pulumi.getter(name="interfaceIndex")
    def interface_index(self) -> Optional[pulumi.Input[int]]:
        """The index of the network interface on which to enable filtering. This must be in the range [`0`-`7`]."""
        return pulumi.get(self, "interface_index")

    @interface_index.setter
    def interface_index(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "interface_index", value)
@pulumi.input_type
class VPCRouterFirewallExpressionArgs:
    # A single firewall rule (expression) for a VPC Router firewall.
    def __init__(__self__, *,
                 allow: pulumi.Input[bool],
                 protocol: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 destination_network: Optional[pulumi.Input[str]] = None,
                 destination_port: Optional[pulumi.Input[str]] = None,
                 logging: Optional[pulumi.Input[bool]] = None,
                 source_network: Optional[pulumi.Input[str]] = None,
                 source_port: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[bool] allow: The flag to allow the packet through the filter.
        :param pulumi.Input[str] protocol: The protocol used for filtering. This must be one of [`tcp`/`udp`/`icmp`/`ip`].
        :param pulumi.Input[str] description: The description of the expression. The length of this value must be in the range [`0`-`512`].
        :param pulumi.Input[str] destination_network: A destination IP address or CIDR block used for filtering (e.g. `192.0.2.1`, `192.0.2.0/24`).
        :param pulumi.Input[str] destination_port: A destination port number or port range used for filtering (e.g. `1024`, `1024-2048`). This is only used when `protocol` is `tcp` or `udp`.
        :param pulumi.Input[bool] logging: The flag to enable packet logging when matching the expression.
        :param pulumi.Input[str] source_network: A source IP address or CIDR block used for filtering (e.g. `192.0.2.1`, `192.0.2.0/24`).
        :param pulumi.Input[str] source_port: A source port number or port range used for filtering (e.g. `1024`, `1024-2048`). This is only used when `protocol` is `tcp` or `udp`.
        """
        pulumi.set(__self__, "allow", allow)
        pulumi.set(__self__, "protocol", protocol)
        # Optional attributes are only recorded when explicitly supplied by the caller.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if destination_network is not None:
            pulumi.set(__self__, "destination_network", destination_network)
        if destination_port is not None:
            pulumi.set(__self__, "destination_port", destination_port)
        if logging is not None:
            pulumi.set(__self__, "logging", logging)
        if source_network is not None:
            pulumi.set(__self__, "source_network", source_network)
        if source_port is not None:
            pulumi.set(__self__, "source_port", source_port)

    @property
    @pulumi.getter
    def allow(self) -> pulumi.Input[bool]:
        """
        The flag to allow the packet through the filter.
        """
        return pulumi.get(self, "allow")

    @allow.setter
    def allow(self, value: pulumi.Input[bool]):
        pulumi.set(self, "allow", value)

    @property
    @pulumi.getter
    def protocol(self) -> pulumi.Input[str]:
        """
        The protocol used for filtering. This must be one of [`tcp`/`udp`/`icmp`/`ip`].
        """
        return pulumi.get(self, "protocol")

    @protocol.setter
    def protocol(self, value: pulumi.Input[str]):
        pulumi.set(self, "protocol", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the expression. The length of this value must be in the range [`0`-`512`].
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="destinationNetwork")
    def destination_network(self) -> Optional[pulumi.Input[str]]:
        """
        A destination IP address or CIDR block used for filtering (e.g. `192.0.2.1`, `192.0.2.0/24`).
        """
        return pulumi.get(self, "destination_network")

    @destination_network.setter
    def destination_network(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "destination_network", value)

    @property
    @pulumi.getter(name="destinationPort")
    def destination_port(self) -> Optional[pulumi.Input[str]]:
        """
        A destination port number or port range used for filtering (e.g. `1024`, `1024-2048`). This is only used when `protocol` is `tcp` or `udp`.
        """
        return pulumi.get(self, "destination_port")

    @destination_port.setter
    def destination_port(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "destination_port", value)

    @property
    @pulumi.getter
    def logging(self) -> Optional[pulumi.Input[bool]]:
        """
        The flag to enable packet logging when matching the expression.
        """
        return pulumi.get(self, "logging")

    @logging.setter
    def logging(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "logging", value)

    @property
    @pulumi.getter(name="sourceNetwork")
    def source_network(self) -> Optional[pulumi.Input[str]]:
        """
        A source IP address or CIDR block used for filtering (e.g. `192.0.2.1`, `192.0.2.0/24`).
        """
        return pulumi.get(self, "source_network")

    @source_network.setter
    def source_network(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_network", value)

    @property
    @pulumi.getter(name="sourcePort")
    def source_port(self) -> Optional[pulumi.Input[str]]:
        """
        A source port number or port range used for filtering (e.g. `1024`, `1024-2048`). This is only used when `protocol` is `tcp` or `udp`.
        """
        return pulumi.get(self, "source_port")

    @source_port.setter
    def source_port(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "source_port", value)
@pulumi.input_type
class VPCRouterL2tpArgs:
    def __init__(__self__, *,
                 pre_shared_secret: pulumi.Input[str],
                 range_start: pulumi.Input[str],
                 range_stop: pulumi.Input[str]):
        """
        :param pulumi.Input[str] pre_shared_secret: The pre shared secret for the VPN. The length of this value must be in the range [`0`-`40`].
        :param pulumi.Input[str] range_start: The start value of IP address range to assign to DHCP client.
        :param pulumi.Input[str] range_stop: The end value of IP address range to assign to DHCP client.
        """
        # All three attributes are required; store them unconditionally.
        for attr_name, attr_value in (
            ("pre_shared_secret", pre_shared_secret),
            ("range_start", range_start),
            ("range_stop", range_stop),
        ):
            pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter(name="preSharedSecret")
    def pre_shared_secret(self) -> pulumi.Input[str]:
        """The pre shared secret for the VPN. The length of this value must be in the range [`0`-`40`]."""
        return pulumi.get(self, "pre_shared_secret")

    @pre_shared_secret.setter
    def pre_shared_secret(self, value: pulumi.Input[str]):
        pulumi.set(self, "pre_shared_secret", value)

    @property
    @pulumi.getter(name="rangeStart")
    def range_start(self) -> pulumi.Input[str]:
        """The start value of IP address range to assign to DHCP client."""
        return pulumi.get(self, "range_start")

    @range_start.setter
    def range_start(self, value: pulumi.Input[str]):
        pulumi.set(self, "range_start", value)

    @property
    @pulumi.getter(name="rangeStop")
    def range_stop(self) -> pulumi.Input[str]:
        """The end value of IP address range to assign to DHCP client."""
        return pulumi.get(self, "range_stop")

    @range_stop.setter
    def range_stop(self, value: pulumi.Input[str]):
        pulumi.set(self, "range_stop", value)
@pulumi.input_type
class VPCRouterPortForwardingArgs:
    def __init__(__self__, *,
                 private_ip: pulumi.Input[str],
                 private_port: pulumi.Input[int],
                 protocol: pulumi.Input[str],
                 public_port: pulumi.Input[int],
                 description: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] private_ip: The destination ip address of the port forwarding.
        :param pulumi.Input[int] private_port: The destination port number of the port forwarding. This will be a port number on a private network.
        :param pulumi.Input[str] protocol: The protocol used for port forwarding. This must be one of [`tcp`/`udp`].
        :param pulumi.Input[int] public_port: The source port number of the port forwarding. This must be a port number on a public network.
        :param pulumi.Input[str] description: The description of the port forwarding. The length of this value must be in the range [`0`-`512`].
        """
        # Required attributes are always stored.
        for attr_name, attr_value in (
            ("private_ip", private_ip),
            ("private_port", private_port),
            ("protocol", protocol),
            ("public_port", public_port),
        ):
            pulumi.set(__self__, attr_name, attr_value)
        # The description is optional.
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter(name="privateIp")
    def private_ip(self) -> pulumi.Input[str]:
        """The destination ip address of the port forwarding."""
        return pulumi.get(self, "private_ip")

    @private_ip.setter
    def private_ip(self, value: pulumi.Input[str]):
        pulumi.set(self, "private_ip", value)

    @property
    @pulumi.getter(name="privatePort")
    def private_port(self) -> pulumi.Input[int]:
        """The destination port number of the port forwarding. This will be a port number on a private network."""
        return pulumi.get(self, "private_port")

    @private_port.setter
    def private_port(self, value: pulumi.Input[int]):
        pulumi.set(self, "private_port", value)

    @property
    @pulumi.getter
    def protocol(self) -> pulumi.Input[str]:
        """The protocol used for port forwarding. This must be one of [`tcp`/`udp`]."""
        return pulumi.get(self, "protocol")

    @protocol.setter
    def protocol(self, value: pulumi.Input[str]):
        pulumi.set(self, "protocol", value)

    @property
    @pulumi.getter(name="publicPort")
    def public_port(self) -> pulumi.Input[int]:
        """The source port number of the port forwarding. This must be a port number on a public network."""
        return pulumi.get(self, "public_port")

    @public_port.setter
    def public_port(self, value: pulumi.Input[int]):
        pulumi.set(self, "public_port", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """The description of the port forwarding. The length of this value must be in the range [`0`-`512`]."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
@pulumi.input_type
class VPCRouterPptpArgs:
    def __init__(__self__, *,
                 range_start: pulumi.Input[str],
                 range_stop: pulumi.Input[str]):
        """
        :param pulumi.Input[str] range_start: The start value of IP address range to assign to PPTP client.
        :param pulumi.Input[str] range_stop: The end value of IP address range to assign to PPTP client.
        """
        # Both range bounds are required; store them unconditionally.
        for attr_name, attr_value in (("range_start", range_start), ("range_stop", range_stop)):
            pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter(name="rangeStart")
    def range_start(self) -> pulumi.Input[str]:
        """The start value of IP address range to assign to PPTP client."""
        return pulumi.get(self, "range_start")

    @range_start.setter
    def range_start(self, value: pulumi.Input[str]):
        pulumi.set(self, "range_start", value)

    @property
    @pulumi.getter(name="rangeStop")
    def range_stop(self) -> pulumi.Input[str]:
        """The end value of IP address range to assign to PPTP client."""
        return pulumi.get(self, "range_stop")

    @range_stop.setter
    def range_stop(self, value: pulumi.Input[str]):
        pulumi.set(self, "range_stop", value)
@pulumi.input_type
class VPCRouterPrivateNetworkInterfaceArgs:
    def __init__(__self__, *,
                 index: pulumi.Input[int],
                 ip_addresses: pulumi.Input[Sequence[pulumi.Input[str]]],
                 netmask: pulumi.Input[int],
                 switch_id: pulumi.Input[str],
                 vip: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[int] index: The index of the network interface. This must be in the range [`1`-`7`].
        :param pulumi.Input[Sequence[pulumi.Input[str]]] ip_addresses: A list of ip address to assign to the network interface. This is required only one value when `plan` is `standard`, two values otherwise.
        :param pulumi.Input[int] netmask: The bit length of the subnet to assign to the network interface.
        :param pulumi.Input[str] switch_id: The id of the connected switch.
        :param pulumi.Input[str] vip: The virtual IP address to assign to the network interface. This is only required when `plan` is not `standard`.
        """
        # Required attributes are always stored.
        for attr_name, attr_value in (
            ("index", index),
            ("ip_addresses", ip_addresses),
            ("netmask", netmask),
            ("switch_id", switch_id),
        ):
            pulumi.set(__self__, attr_name, attr_value)
        # The virtual IP is optional (non-standard plans only).
        if vip is not None:
            pulumi.set(__self__, "vip", vip)

    @property
    @pulumi.getter
    def index(self) -> pulumi.Input[int]:
        """The index of the network interface. This must be in the range [`1`-`7`]."""
        return pulumi.get(self, "index")

    @index.setter
    def index(self, value: pulumi.Input[int]):
        pulumi.set(self, "index", value)

    @property
    @pulumi.getter(name="ipAddresses")
    def ip_addresses(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """A list of ip address to assign to the network interface. This is required only one value when `plan` is `standard`, two values otherwise."""
        return pulumi.get(self, "ip_addresses")

    @ip_addresses.setter
    def ip_addresses(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "ip_addresses", value)

    @property
    @pulumi.getter
    def netmask(self) -> pulumi.Input[int]:
        """The bit length of the subnet to assign to the network interface."""
        return pulumi.get(self, "netmask")

    @netmask.setter
    def netmask(self, value: pulumi.Input[int]):
        pulumi.set(self, "netmask", value)

    @property
    @pulumi.getter(name="switchId")
    def switch_id(self) -> pulumi.Input[str]:
        """The id of the connected switch."""
        return pulumi.get(self, "switch_id")

    @switch_id.setter
    def switch_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "switch_id", value)

    @property
    @pulumi.getter
    def vip(self) -> Optional[pulumi.Input[str]]:
        """The virtual IP address to assign to the network interface. This is only required when `plan` is not `standard`."""
        return pulumi.get(self, "vip")

    @vip.setter
    def vip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vip", value)
@pulumi.input_type
class VPCRouterPublicNetworkInterfaceArgs:
    def __init__(__self__, *,
                 aliases: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 switch_id: Optional[pulumi.Input[str]] = None,
                 vip: Optional[pulumi.Input[str]] = None,
                 vrid: Optional[pulumi.Input[int]] = None):
        """
        Public-side network interface settings for a VPC Router.

        :param pulumi.Input[Sequence[pulumi.Input[str]]] aliases: A list of ip alias to assign to the VPC Router. This can only be specified if `plan` is not `standard`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] ip_addresses: The list of the IP address to assign to the VPC Router. This is required only one value when `plan` is `standard`, two values otherwise.
        :param pulumi.Input[str] switch_id: The id of the switch to connect. This is only required when `plan` is not `standard`.
        :param pulumi.Input[str] vip: The virtual IP address of the VPC Router. This is only required when `plan` is not `standard`.
        :param pulumi.Input[int] vrid: The Virtual Router Identifier. This is only required when `plan` is not `standard`.
        """
        # Every field is optional: store only the ones that were supplied.
        for key, val in (("aliases", aliases),
                         ("ip_addresses", ip_addresses),
                         ("switch_id", switch_id),
                         ("vip", vip),
                         ("vrid", vrid)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def aliases(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """A list of ip alias to assign to the VPC Router. This can only be specified if `plan` is not `standard`."""
        return pulumi.get(self, "aliases")

    @aliases.setter
    def aliases(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "aliases", value)

    @property
    @pulumi.getter(name="ipAddresses")
    def ip_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """The list of the IP address to assign to the VPC Router. This is required only one value when `plan` is `standard`, two values otherwise."""
        return pulumi.get(self, "ip_addresses")

    @ip_addresses.setter
    def ip_addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "ip_addresses", value)

    @property
    @pulumi.getter(name="switchId")
    def switch_id(self) -> Optional[pulumi.Input[str]]:
        """The id of the switch to connect. This is only required when `plan` is not `standard`."""
        return pulumi.get(self, "switch_id")

    @switch_id.setter
    def switch_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "switch_id", value)

    @property
    @pulumi.getter
    def vip(self) -> Optional[pulumi.Input[str]]:
        """The virtual IP address of the VPC Router. This is only required when `plan` is not `standard`."""
        return pulumi.get(self, "vip")

    @vip.setter
    def vip(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vip", value)

    @property
    @pulumi.getter
    def vrid(self) -> Optional[pulumi.Input[int]]:
        """The Virtual Router Identifier. This is only required when `plan` is not `standard`."""
        return pulumi.get(self, "vrid")

    @vrid.setter
    def vrid(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "vrid", value)
@pulumi.input_type
class VPCRouterSiteToSiteVpnArgs:
    def __init__(__self__, *,
                 local_prefixes: pulumi.Input[Sequence[pulumi.Input[str]]],
                 peer: pulumi.Input[str],
                 pre_shared_secret: pulumi.Input[str],
                 remote_id: pulumi.Input[str],
                 routes: pulumi.Input[Sequence[pulumi.Input[str]]]):
        """
        Site-to-site VPN settings for a VPC Router. All fields are required.

        :param pulumi.Input[Sequence[pulumi.Input[str]]] local_prefixes: A list of CIDR block of the network under the VPC Router.
        :param pulumi.Input[str] peer: The IP address of the opposing appliance connected to the VPC Router.
        :param pulumi.Input[str] pre_shared_secret: The pre shared secret for the VPN. The length of this value must be in the range [`0`-`40`].
        :param pulumi.Input[str] remote_id: The id of the opposing appliance connected to the VPC Router. This is typically set same as value of `peer`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] routes: A list of CIDR block of VPN connected networks.
        """
        for key, val in (("local_prefixes", local_prefixes),
                         ("peer", peer),
                         ("pre_shared_secret", pre_shared_secret),
                         ("remote_id", remote_id),
                         ("routes", routes)):
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter(name="localPrefixes")
    def local_prefixes(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """A list of CIDR block of the network under the VPC Router."""
        return pulumi.get(self, "local_prefixes")

    @local_prefixes.setter
    def local_prefixes(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "local_prefixes", value)

    @property
    @pulumi.getter
    def peer(self) -> pulumi.Input[str]:
        """The IP address of the opposing appliance connected to the VPC Router."""
        return pulumi.get(self, "peer")

    @peer.setter
    def peer(self, value: pulumi.Input[str]):
        pulumi.set(self, "peer", value)

    @property
    @pulumi.getter(name="preSharedSecret")
    def pre_shared_secret(self) -> pulumi.Input[str]:
        """The pre shared secret for the VPN. The length of this value must be in the range [`0`-`40`]."""
        return pulumi.get(self, "pre_shared_secret")

    @pre_shared_secret.setter
    def pre_shared_secret(self, value: pulumi.Input[str]):
        pulumi.set(self, "pre_shared_secret", value)

    @property
    @pulumi.getter(name="remoteId")
    def remote_id(self) -> pulumi.Input[str]:
        """The id of the opposing appliance connected to the VPC Router. This is typically set same as value of `peer`."""
        return pulumi.get(self, "remote_id")

    @remote_id.setter
    def remote_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "remote_id", value)

    @property
    @pulumi.getter
    def routes(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """A list of CIDR block of VPN connected networks."""
        return pulumi.get(self, "routes")

    @routes.setter
    def routes(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "routes", value)
@pulumi.input_type
class VPCRouterStaticNatArgs:
    def __init__(__self__, *,
                 private_ip: pulumi.Input[str],
                 public_ip: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None):
        """
        A static NAT entry for a VPC Router.

        :param pulumi.Input[str] private_ip: The private IP address used for the static NAT.
        :param pulumi.Input[str] public_ip: The public IP address used for the static NAT.
        :param pulumi.Input[str] description: The description of the static nat. The length of this value must be in the range [`0`-`512`].
        """
        pulumi.set(__self__, "private_ip", private_ip)
        pulumi.set(__self__, "public_ip", public_ip)
        # The description is optional; store it only when given.
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter(name="privateIp")
    def private_ip(self) -> pulumi.Input[str]:
        """The private IP address used for the static NAT."""
        return pulumi.get(self, "private_ip")

    @private_ip.setter
    def private_ip(self, value: pulumi.Input[str]):
        pulumi.set(self, "private_ip", value)

    @property
    @pulumi.getter(name="publicIp")
    def public_ip(self) -> pulumi.Input[str]:
        """The public IP address used for the static NAT."""
        return pulumi.get(self, "public_ip")

    @public_ip.setter
    def public_ip(self, value: pulumi.Input[str]):
        pulumi.set(self, "public_ip", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """The description of the static nat. The length of this value must be in the range [`0`-`512`]."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
@pulumi.input_type
class VPCRouterStaticRouteArgs:
    def __init__(__self__, *,
                 next_hop: pulumi.Input[str],
                 prefix: pulumi.Input[str]):
        """
        A static route entry for a VPC Router.

        :param pulumi.Input[str] next_hop: The IP address of the next hop.
        :param pulumi.Input[str] prefix: The CIDR block of destination.
        """
        for key, val in (("next_hop", next_hop), ("prefix", prefix)):
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter(name="nextHop")
    def next_hop(self) -> pulumi.Input[str]:
        """The IP address of the next hop."""
        return pulumi.get(self, "next_hop")

    @next_hop.setter
    def next_hop(self, value: pulumi.Input[str]):
        pulumi.set(self, "next_hop", value)

    @property
    @pulumi.getter
    def prefix(self) -> pulumi.Input[str]:
        """The CIDR block of destination."""
        return pulumi.get(self, "prefix")

    @prefix.setter
    def prefix(self, value: pulumi.Input[str]):
        pulumi.set(self, "prefix", value)
@pulumi.input_type
class VPCRouterUserArgs:
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 password: pulumi.Input[str]):
        """
        A remote-access user account for a VPC Router.

        :param pulumi.Input[str] name: The user name used to authenticate remote access.
        :param pulumi.Input[str] password: The password used to authenticate remote access.
        """
        for key, val in (("name", name), ("password", password)):
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """The user name used to authenticate remote access."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def password(self) -> pulumi.Input[str]:
        """The password used to authenticate remote access."""
        return pulumi.get(self, "password")

    @password.setter
    def password(self, value: pulumi.Input[str]):
        pulumi.set(self, "password", value)
@pulumi.input_type
class VPCRouterWireGuardArgs:
    def __init__(__self__, *,
                 ip_address: pulumi.Input[str],
                 peers: Optional[pulumi.Input[Sequence[pulumi.Input['VPCRouterWireGuardPeerArgs']]]] = None,
                 public_key: Optional[pulumi.Input[str]] = None):
        """
        WireGuard server settings for a VPC Router.

        :param pulumi.Input[str] ip_address: The IP address for WireGuard server. This must be formatted with xxx.xxx.xxx.xxx/nn.
        :param pulumi.Input[Sequence[pulumi.Input['VPCRouterWireGuardPeerArgs']]] peers: One or more `peer` blocks as defined below.
        :param pulumi.Input[str] public_key: The public key of the WireGuard client.
        """
        pulumi.set(__self__, "ip_address", ip_address)
        # Optional settings are stored only when supplied.
        for key, val in (("peers", peers), ("public_key", public_key)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Input[str]:
        """The IP address for WireGuard server. This must be formatted with xxx.xxx.xxx.xxx/nn."""
        return pulumi.get(self, "ip_address")

    @ip_address.setter
    def ip_address(self, value: pulumi.Input[str]):
        pulumi.set(self, "ip_address", value)

    @property
    @pulumi.getter
    def peers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VPCRouterWireGuardPeerArgs']]]]:
        """One or more `peer` blocks as defined below."""
        return pulumi.get(self, "peers")

    @peers.setter
    def peers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VPCRouterWireGuardPeerArgs']]]]):
        pulumi.set(self, "peers", value)

    @property
    @pulumi.getter(name="publicKey")
    def public_key(self) -> Optional[pulumi.Input[str]]:
        """The public key of the WireGuard client."""
        return pulumi.get(self, "public_key")

    @public_key.setter
    def public_key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "public_key", value)
@pulumi.input_type
class VPCRouterWireGuardPeerArgs:
    def __init__(__self__, *,
                 ip_address: pulumi.Input[str],
                 name: pulumi.Input[str],
                 public_key: pulumi.Input[str]):
        """
        A WireGuard peer definition for a VPC Router.

        :param pulumi.Input[str] ip_address: The IP address for peer.
        :param pulumi.Input[str] name: The name of the peer.
        :param pulumi.Input[str] public_key: The public key of the WireGuard client.
        """
        for key, val in (("ip_address", ip_address),
                         ("name", name),
                         ("public_key", public_key)):
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> pulumi.Input[str]:
        """The IP address for peer."""
        return pulumi.get(self, "ip_address")

    @ip_address.setter
    def ip_address(self, value: pulumi.Input[str]):
        pulumi.set(self, "ip_address", value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """The name of the peer."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="publicKey")
    def public_key(self) -> pulumi.Input[str]:
        """The public key of the WireGuard client."""
        return pulumi.get(self, "public_key")

    @public_key.setter
    def public_key(self, value: pulumi.Input[str]):
        pulumi.set(self, "public_key", value)
@pulumi.input_type
class GetArchiveFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetArchiveFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        Filtering settings used when looking up archives.

        :param Sequence['GetArchiveFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition.
        """
        # Only store the filter criteria that were actually provided.
        for key, val in (("conditions", conditions), ("id", id),
                         ("names", names), ("tags", tags)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetArchiveFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetArchiveFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """The resource tags on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetArchiveFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used for filtering archives.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they are combined as an AND condition.
        """
        for key, val in (("name", name), ("values", values)):
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetBridgeFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetBridgeFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None):
        """
        Filtering settings used when looking up bridges.

        :param Sequence['GetBridgeFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition.
        """
        # Only store the filter criteria that were actually provided.
        for key, val in (("conditions", conditions), ("id", id), ("names", names)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetBridgeFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetBridgeFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)
@pulumi.input_type
class GetBridgeFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used for filtering bridges.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they are combined as an AND condition.
        """
        for key, val in (("name", name), ("values", values)):
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetCDROMFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetCDROMFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        Filtering settings used when looking up CD-ROMs.

        :param Sequence['GetCDROMFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition.
        """
        # Only store the filter criteria that were actually provided.
        for key, val in (("conditions", conditions), ("id", id),
                         ("names", names), ("tags", tags)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetCDROMFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetCDROMFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """The resource tags on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetCDROMFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used for filtering CD-ROMs.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they are combined as an AND condition.
        """
        for key, val in (("name", name), ("values", values)):
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetCertificateAuthorityFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetCertificateAuthorityFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        Filtering settings used when looking up certificate authorities.

        :param Sequence['GetCertificateAuthorityFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition.
        """
        # Only store the filter criteria that were actually provided.
        for key, val in (("conditions", conditions), ("id", id),
                         ("names", names), ("tags", tags)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetCertificateAuthorityFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetCertificateAuthorityFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """The resource tags on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetCertificateAuthorityFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used for filtering certificate authorities.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they are combined as an AND condition.
        """
        for key, val in (("name", name), ("values", values)):
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetContainerRegistryFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetContainerRegistryFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        Filtering settings used when looking up container registries.

        :param Sequence['GetContainerRegistryFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition.
        """
        # Only store the filter criteria that were actually provided.
        for key, val in (("conditions", conditions), ("id", id),
                         ("names", names), ("tags", tags)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetContainerRegistryFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetContainerRegistryFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """The resource tags on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetContainerRegistryFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used for filtering container registries.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they are combined as an AND condition.
        """
        for key, val in (("name", name), ("values", values)):
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetDNSFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetDNSFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        Filtering settings used when looking up DNS zones.

        :param Sequence['GetDNSFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition.
        """
        # Only store the filter criteria that were actually provided.
        for key, val in (("conditions", conditions), ("id", id),
                         ("names", names), ("tags", tags)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetDNSFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetDNSFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """The resource tags on SakuraCloud used for filtering. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetDNSFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used for filtering DNS zones.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they are combined as an AND condition.
        """
        for key, val in (("name", name), ("values", values)):
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. If multiple values are specified, they are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetDatabaseFilterArgs:
    """Filter settings used when looking up Database resources."""

    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetDatabaseFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        :param Sequence['GetDatabaseFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        # Register only arguments that were explicitly provided so that
        # omitted filters stay absent rather than becoming explicit Nones.
        provided = {
            "conditions": conditions,
            "id": id,
            "names": names,
            "tags": tags,
        }
        for key, arg in provided.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetDatabaseFilterConditionArgs']]:
        """
        One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetDatabaseFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        The resource id on SakuraCloud used for filtering.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """
        The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """
        The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetDatabaseFilterConditionArgs:
    """A single name/values matching rule used inside a Database filter block."""

    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        # Both fields are required, so register them unconditionally.
        for key, arg in (("name", name), ("values", values)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the target field. This value is case-sensitive.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetDiskFilterArgs:
    """Filter settings used when looking up Disk resources."""

    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetDiskFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        :param Sequence['GetDiskFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        # Register only arguments that were explicitly provided so that
        # omitted filters stay absent rather than becoming explicit Nones.
        provided = {
            "conditions": conditions,
            "id": id,
            "names": names,
            "tags": tags,
        }
        for key, arg in provided.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetDiskFilterConditionArgs']]:
        """
        One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetDiskFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        The resource id on SakuraCloud used for filtering.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """
        The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """
        The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetDiskFilterConditionArgs:
    """A single name/values matching rule used inside a Disk filter block."""

    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        # Both fields are required, so register them unconditionally.
        for key, arg in (("name", name), ("values", values)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the target field. This value is case-sensitive.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetESMEFilterArgs:
    """Filter settings used when looking up ESME resources."""

    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetESMEFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        :param Sequence['GetESMEFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        # Register only arguments that were explicitly provided so that
        # omitted filters stay absent rather than becoming explicit Nones.
        provided = {
            "conditions": conditions,
            "id": id,
            "names": names,
            "tags": tags,
        }
        for key, arg in provided.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetESMEFilterConditionArgs']]:
        """
        One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetESMEFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        The resource id on SakuraCloud used for filtering.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """
        The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """
        The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetESMEFilterConditionArgs:
    """A single name/values matching rule used inside an ESME filter block."""

    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        # Both fields are required, so register them unconditionally.
        for key, arg in (("name", name), ("values", values)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the target field. This value is case-sensitive.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetEnhancedDBFilterArgs:
    """Filter settings used when looking up EnhancedDB resources."""

    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetEnhancedDBFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        :param Sequence['GetEnhancedDBFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        # Register only arguments that were explicitly provided so that
        # omitted filters stay absent rather than becoming explicit Nones.
        provided = {
            "conditions": conditions,
            "id": id,
            "names": names,
            "tags": tags,
        }
        for key, arg in provided.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetEnhancedDBFilterConditionArgs']]:
        """
        One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetEnhancedDBFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        The resource id on SakuraCloud used for filtering.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """
        The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """
        The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetEnhancedDBFilterConditionArgs:
    """A single name/values matching rule used inside an EnhancedDB filter block."""

    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        # Both fields are required, so register them unconditionally.
        for key, arg in (("name", name), ("values", values)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the target field. This value is case-sensitive.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetGSLBFilterArgs:
    """Filter settings used when looking up GSLB resources."""

    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetGSLBFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        :param Sequence['GetGSLBFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        # Register only arguments that were explicitly provided so that
        # omitted filters stay absent rather than becoming explicit Nones.
        provided = {
            "conditions": conditions,
            "id": id,
            "names": names,
            "tags": tags,
        }
        for key, arg in provided.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetGSLBFilterConditionArgs']]:
        """
        One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetGSLBFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        The resource id on SakuraCloud used for filtering.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """
        The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """
        The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetGSLBFilterConditionArgs:
    """A single name/values matching rule used inside a GSLB filter block."""

    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        # Both fields are required, so register them unconditionally.
        for key, arg in (("name", name), ("values", values)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the target field. This value is case-sensitive.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetIconFilterArgs:
    """Filter settings used when looking up Icon resources."""

    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetIconFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        :param Sequence['GetIconFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        # Register only arguments that were explicitly provided so that
        # omitted filters stay absent rather than becoming explicit Nones.
        provided = {
            "conditions": conditions,
            "id": id,
            "names": names,
            "tags": tags,
        }
        for key, arg in provided.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetIconFilterConditionArgs']]:
        """
        One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetIconFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        The resource id on SakuraCloud used for filtering.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """
        The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """
        The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetIconFilterConditionArgs:
    """A single name/values matching rule used inside an Icon filter block."""

    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        # Both fields are required, so register them unconditionally.
        for key, arg in (("name", name), ("values", values)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the target field. This value is case-sensitive.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetInternetFilterArgs:
    """Filter settings used when looking up Internet (router) resources."""

    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetInternetFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        :param Sequence['GetInternetFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        # Register only arguments that were explicitly provided so that
        # omitted filters stay absent rather than becoming explicit Nones.
        provided = {
            "conditions": conditions,
            "id": id,
            "names": names,
            "tags": tags,
        }
        for key, arg in provided.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetInternetFilterConditionArgs']]:
        """
        One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetInternetFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        The resource id on SakuraCloud used for filtering.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """
        The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """
        The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetInternetFilterConditionArgs:
    """A single name/values matching rule used inside an Internet filter block."""

    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        # Both fields are required, so register them unconditionally.
        for key, arg in (("name", name), ("values", values)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the target field. This value is case-sensitive.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetLoadBalancerFilterArgs:
    """Filter settings used when looking up LoadBalancer resources."""

    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetLoadBalancerFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        :param Sequence['GetLoadBalancerFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        # Register only arguments that were explicitly provided so that
        # omitted filters stay absent rather than becoming explicit Nones.
        provided = {
            "conditions": conditions,
            "id": id,
            "names": names,
            "tags": tags,
        }
        for key, arg in provided.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetLoadBalancerFilterConditionArgs']]:
        """
        One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetLoadBalancerFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        The resource id on SakuraCloud used for filtering.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """
        The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """
        The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetLoadBalancerFilterConditionArgs:
    """A single name/values matching rule used inside a LoadBalancer filter block."""

    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        # Both fields are required, so register them unconditionally.
        for key, arg in (("name", name), ("values", values)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the target field. This value is case-sensitive.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetLocalRouterFilterArgs:
    """Filter settings used when looking up LocalRouter resources."""

    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetLocalRouterFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        :param Sequence['GetLocalRouterFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        # Register only arguments that were explicitly provided so that
        # omitted filters stay absent rather than becoming explicit Nones.
        provided = {
            "conditions": conditions,
            "id": id,
            "names": names,
            "tags": tags,
        }
        for key, arg in provided.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetLocalRouterFilterConditionArgs']]:
        """
        One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetLocalRouterFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        The resource id on SakuraCloud used for filtering.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """
        The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """
        The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetLocalRouterFilterConditionArgs:
    """A single name/values matching rule used inside a LocalRouter filter block."""

    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        # Both fields are required, so register them unconditionally.
        for key, arg in (("name", name), ("values", values)):
            pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the target field. This value is case-sensitive.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        The values of the condition. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetNFSFilterArgs:
    """Filter settings used when looking up NFS resources."""

    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetNFSFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        :param Sequence['GetNFSFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        # Register only arguments that were explicitly provided so that
        # omitted filters stay absent rather than becoming explicit Nones.
        provided = {
            "conditions": conditions,
            "id": id,
            "names": names,
            "tags": tags,
        }
        for key, arg in provided.items():
            if arg is not None:
                pulumi.set(__self__, key, arg)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetNFSFilterConditionArgs']]:
        """
        One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetNFSFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        The resource id on SakuraCloud used for filtering.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """
        The resource names on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """
        The resource tags on SakuraCloud used for filtering. If multiple values are specified, they combined as AND condition.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetNFSFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used inside an NFS filter block.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. Multiple values are combined as an AND condition.
        """
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetNoteFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetNoteFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        Filter block used when looking up a Note data source.

        :param Sequence['GetNoteFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        """
        # Forward only the arguments that were actually supplied.
        for attr, val in (("conditions", conditions), ("id", id),
                          ("names", names), ("tags", tags)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetNoteFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetNoteFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetNoteFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used inside a Note filter block.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. Multiple values are combined as an AND condition.
        """
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetPacketFilterFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetPacketFilterFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None):
        """
        Filter block used when looking up a PacketFilter data source.

        Note: unlike most filters in this module, PacketFilter has no ``tags`` field.

        :param Sequence['GetPacketFilterFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        """
        # Forward only the arguments that were actually supplied.
        for attr, val in (("conditions", conditions), ("id", id), ("names", names)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetPacketFilterFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetPacketFilterFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)
@pulumi.input_type
class GetPacketFilterFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used inside a PacketFilter filter block.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. Multiple values are combined as an AND condition.
        """
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetPrivateHostFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetPrivateHostFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        Filter block used when looking up a PrivateHost data source.

        :param Sequence['GetPrivateHostFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        """
        # Forward only the arguments that were actually supplied.
        for attr, val in (("conditions", conditions), ("id", id),
                          ("names", names), ("tags", tags)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetPrivateHostFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetPrivateHostFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetPrivateHostFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used inside a PrivateHost filter block.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. Multiple values are combined as an AND condition.
        """
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetProxyLBFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetProxyLBFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        Filter block used when looking up a ProxyLB data source.

        :param Sequence['GetProxyLBFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        """
        # Forward only the arguments that were actually supplied.
        for attr, val in (("conditions", conditions), ("id", id),
                          ("names", names), ("tags", tags)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetProxyLBFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetProxyLBFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetProxyLBFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used inside a ProxyLB filter block.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. Multiple values are combined as an AND condition.
        """
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetSSHKeyFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetSSHKeyFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None):
        """
        Filter block used when looking up an SSHKey data source.

        Note: unlike most filters in this module, SSHKey has no ``tags`` field.

        :param Sequence['GetSSHKeyFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        """
        # Forward only the arguments that were actually supplied.
        for attr, val in (("conditions", conditions), ("id", id), ("names", names)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetSSHKeyFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetSSHKeyFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)
@pulumi.input_type
class GetSSHKeyFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used inside an SSHKey filter block.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. Multiple values are combined as an AND condition.
        """
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetServerFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetServerFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        Filter block used when looking up a Server data source.

        :param Sequence['GetServerFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        """
        # Forward only the arguments that were actually supplied.
        for attr, val in (("conditions", conditions), ("id", id),
                          ("names", names), ("tags", tags)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetServerFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetServerFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetServerFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used inside a Server filter block.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. Multiple values are combined as an AND condition.
        """
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetSimpleMonitorFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetSimpleMonitorFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        Filter block used when looking up a SimpleMonitor data source.

        :param Sequence['GetSimpleMonitorFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        """
        # Forward only the arguments that were actually supplied.
        for attr, val in (("conditions", conditions), ("id", id),
                          ("names", names), ("tags", tags)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetSimpleMonitorFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetSimpleMonitorFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetSimpleMonitorFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used inside a SimpleMonitor filter block.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. Multiple values are combined as an AND condition.
        """
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetSwitchFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetSwitchFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        Filter block used when looking up a Switch data source.

        :param Sequence['GetSwitchFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        """
        # Forward only the arguments that were actually supplied.
        for attr, val in (("conditions", conditions), ("id", id),
                          ("names", names), ("tags", tags)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetSwitchFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetSwitchFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetSwitchFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used inside a Switch filter block.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. Multiple values are combined as an AND condition.
        """
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
@pulumi.input_type
class GetVPCRouterFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[Sequence['GetVPCRouterFilterConditionArgs']] = None,
                 id: Optional[str] = None,
                 names: Optional[Sequence[str]] = None,
                 tags: Optional[Sequence[str]] = None):
        """
        Filter block used when looking up a VPCRouter data source.

        :param Sequence['GetVPCRouterFilterConditionArgs'] conditions: One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/).
        :param str id: The resource id on SakuraCloud used for filtering.
        :param Sequence[str] names: The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        :param Sequence[str] tags: The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition.
        """
        # Forward only the arguments that were actually supplied.
        for attr, val in (("conditions", conditions), ("id", id),
                          ("names", names), ("tags", tags)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[Sequence['GetVPCRouterFilterConditionArgs']]:
        """One or more name/values pairs used for filtering. There are several valid keys, for a full reference, check out finding section in the [SakuraCloud API reference](https://developer.sakura.ad.jp/cloud/api/1.1/)."""
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[Sequence['GetVPCRouterFilterConditionArgs']]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """The resource id on SakuraCloud used for filtering."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def names(self) -> Optional[Sequence[str]]:
        """The resource names on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "names")

    @names.setter
    def names(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "names", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[Sequence[str]]:
        """The resource tags on SakuraCloud used for filtering. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[Sequence[str]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class GetVPCRouterFilterConditionArgs:
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str]):
        """
        A single name/values matching condition used inside a VPCRouter filter block.

        :param str name: The name of the target field. This value is case-sensitive.
        :param Sequence[str] values: The values of the condition. Multiple values are combined as an AND condition.
        """
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the target field. This value is case-sensitive."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: str):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values of the condition. Multiple values are combined as an AND condition."""
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Sequence[str]):
        pulumi.set(self, "values", value)
| 37.614653 | 292 | 0.631089 | 33,412 | 276,731 | 5.102119 | 0.020412 | 0.082788 | 0.063295 | 0.046254 | 0.909798 | 0.880936 | 0.85204 | 0.828036 | 0.811981 | 0.785208 | 0 | 0.003758 | 0.251985 | 276,731 | 7,356 | 293 | 37.619766 | 0.819782 | 0.294391 | 0 | 0.774944 | 1 | 0.000226 | 0.097116 | 0.032737 | 0 | 0 | 0 | 0 | 0 | 1 | 0.211512 | false | 0.006772 | 0.001129 | 0 | 0.330474 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
7b8397a7c260c356d53c55cadf97d81011bc1e6e | 120 | py | Python | setup.py | chib0/asd-winter2019 | c7d95305b1e8b99013fd40da1e7ebe01c2d0102a | [
"Apache-2.0"
] | null | null | null | setup.py | chib0/asd-winter2019 | c7d95305b1e8b99013fd40da1e7ebe01c2d0102a | [
"Apache-2.0"
] | 4 | 2021-02-02T22:38:53.000Z | 2022-01-13T02:32:33.000Z | setup.py | chib0/asd-winter2019 | c7d95305b1e8b99013fd40da1e7ebe01c2d0102a | [
"Apache-2.0"
] | null | null | null | # todo: look at python -m grpc_tools.protoc cortex/core/net_messages.proto -I cortex/core --python_out /tmp/protoc_test
| 60 | 119 | 0.791667 | 21 | 120 | 4.333333 | 0.809524 | 0.21978 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 120 | 1 | 120 | 120 | 0.842593 | 0.975 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 1 | null | 1 | null | true | 0 | 0 | null | null | null | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c8850314e0c85f6383e9c1364856fc160fb7944b | 51,710 | py | Python | src/rufus/third_party/dubins_airplane/DubinsAirplaneFunctions.py | jawallace/rufus | 81b49604f1fa558ee4acff510f5d5fd19668278d | [
"MIT"
] | 2 | 2020-03-04T08:55:10.000Z | 2021-08-12T03:28:01.000Z | src/rufus/third_party/dubins_airplane/DubinsAirplaneFunctions.py | jawallace/rufus | 81b49604f1fa558ee4acff510f5d5fd19668278d | [
"MIT"
] | null | null | null | src/rufus/third_party/dubins_airplane/DubinsAirplaneFunctions.py | jawallace/rufus | 81b49604f1fa558ee4acff510f5d5fd19668278d | [
"MIT"
] | null | null | null | # __DUBINSAIRPLANEFUNCTIONS__
# The functions here implement the 3D Dubins Airplane model with totally
# 16 cases of possible trajectories
#
# Authors:
# Kostas Alexis (konstantinos.alexis@mavt.ethz.ch)
from __future__ import division
import numpy as np
from math import tan, sin, cos, atan2, fmod, acos, asin, pow, sqrt, fabs,atan
pi = np.pi

# Result container shared by the path-construction routines below.
# Scalar fields default to 0; 3-D points/direction vectors default to 3x1 zero
# arrays.  Key order matches the original incremental initialization.
DubinsAirplaneSolution = {
    'case': 0,
    'p_s': np.zeros((3, 1)),
    'angl_s': 0,
    'p_e': np.zeros((3, 1)),
    'R': 0,
    'gamma': 0,
    'L': 0,
    'c_s': np.zeros((3, 1)),
    'psi_s': 0,
    'lamda_s': 0,
    'lamda_si': 0,
    'k_s': 0,
    'c_ei': np.zeros((3, 1)),
    'c_si': np.zeros((3, 1)),
    'psi_ei': 0,
    'lamda_ei': 0,
    'psi_si': 0,
    'k_ei': 0,
    'c_e': 0,  # NOTE: initialized as a scalar, unlike the other spiral centers
    'k_si': 0,
    'psi_e': 0,
    'lamda_e': 0,
    'k_e': 0,
    'w_s': np.zeros((3, 1)),
    'q_s': np.zeros((3, 1)),
    'w_si': np.zeros((3, 1)),
    'q_si': np.zeros((3, 1)),
    'w_l': np.zeros((3, 1)),
    'q_l': np.zeros((3, 1)),
    'w_ei': np.zeros((3, 1)),
    'q_ei': np.zeros((3, 1)),
    'w_e': np.zeros((3, 1)),
    'q_e': np.zeros((3, 1)),
}
def roty(theta=None):
    """Return the 3x3 rotation matrix for a rotation of `theta` radians about the y axis."""
    c = cos(theta)
    s = sin(theta)
    return np.array([[c, 0, s],
                     [0, 1, 0],
                     [-s, 0, c]])
def rotz(theta=None):
    """Return the 3x3 rotation matrix for a rotation of `theta` radians about the z axis."""
    c = cos(theta)
    s = sin(theta)
    return np.array([[c, -s, 0],
                     [s, c, 0],
                     [0, 0, 1]])
def computeDubinsRSR(R=None, crs=None, cre=None, anglstart=None, anglend=None):
    """Length of the right-straight-right Dubins path.

    R is the turn radius, crs/cre the start/end right-turn circle centers
    (3-vectors; only x,y are used), anglstart/anglend the headings.
    """
    theta = atan2(cre[1] - crs[1], cre[0] - crs[0])
    # Straight segment between the two circle centers (planar distance).
    straight = np.linalg.norm(crs[0:2] - cre[0:2], ord=2)
    # Arc lengths on the start and end circles, both clockwise.
    arc_start = R * fmod(2 * pi + fmod(theta - pi / 2, 2 * pi) - fmod(anglstart - pi / 2, 2 * pi), 2 * pi)
    arc_end = R * fmod(2 * pi + fmod(anglend - pi / 2, 2 * pi) - fmod(theta - pi / 2, 2 * pi), 2 * pi)
    return straight + arc_start + arc_end
def computeDubinsLSL(R=None, cls=None, cle=None, anglstart=None, anglend=None):
    """Length of the left-straight-left Dubins path.

    R is the turn radius, cls/cle the start/end left-turn circle centers
    (3-vectors; only x,y are used), anglstart/anglend the headings.
    """
    theta = atan2(cle[1] - cls[1], cle[0] - cls[0])
    # Straight segment between the two circle centers (planar distance).
    straight = np.linalg.norm(cls[0:2] - cle[0:2])
    # Arc lengths on the start and end circles, both counter-clockwise.
    arc_start = R * fmod(2 * pi - fmod(theta + pi / 2, 2 * pi) + fmod(anglstart + pi / 2, 2 * pi), 2 * pi)
    arc_end = R * fmod(2 * pi - fmod(anglend + pi / 2, 2 * pi) + fmod(theta + pi / 2, 2 * pi), 2 * pi)
    return straight + arc_start + arc_end
def computeDubinsLSR(R=None, cls=None, cre=None, anglstart=None, anglend=None):
    """Length of the left-straight-right Dubins path.

    R is the turn radius, cls/cre the start/end circle centers (3-vectors;
    only x,y are used), anglstart/anglend the headings.  Returns the sentinel
    pow(10.0, 8) when the geometry is infeasible (circles coincident or
    closer than 2*R, so no crossing tangent exists), matching the original
    behavior.

    Fix: the original computed 2*R/ell before checking ell == 0, which raises
    ZeroDivisionError for plain-float inputs and relied on numpy's inf-with-
    RuntimeWarning semantics for numpy scalars.  The division is now guarded;
    results for all inputs are unchanged.
    """
    ell = np.linalg.norm(cre[0:2] - cls[0:2], ord=2)
    theta = atan2(cre[1] - cls[1], cre[0] - cls[0])
    # The crossing tangent exists only when |2R/ell| <= 1; otherwise fall
    # through with theta2 == 0, which triggers the infeasible sentinel below.
    if ell == 0 or fabs(2 * R / ell) > 1:
        theta2 = 0
    else:
        theta2 = acos(2 * R / ell)
    if theta2 == 0:
        # Infeasible geometry: huge length so this candidate never wins.
        L = pow(10.0, 8)
    else:
        L = sqrt(pow(ell, 2) - 4 * pow(R, 2)) + R * fmod(2 * pi - fmod(theta + theta2, 2 * pi) + fmod(anglstart + pi / 2, 2 * pi), 2 * pi) + R * fmod(2 * pi - fmod(theta + theta2 - pi, 2 * pi) + fmod(anglend - pi / 2, 2 * pi), 2 * pi)
    return L
def computeDubinsRSL(R=None, crs=None, cle=None, anglstart=None, anglend=None):
    """Length of the right-straight-left Dubins path.

    R is the turn radius, crs/cle the start/end circle centers (3-vectors;
    only x,y are used), anglstart/anglend the headings.  Returns the sentinel
    pow(10.0, 8) when the circles are coincident (ell == 0) or the computed
    tangent angle is exactly zero, matching the original behavior.

    Fix: the original computed 2*R/ell before checking ell == 0, which raises
    ZeroDivisionError for plain-float inputs and relied on numpy's inf-with-
    RuntimeWarning semantics for numpy scalars.  The division is now guarded.
    A dead `flag_zero` local (computed but never read) was also removed;
    results for all inputs are unchanged.
    """
    ell = np.linalg.norm(cle[0:2] - crs[0:2], ord=2)
    theta = atan2(cle[1] - crs[1], cle[0] - crs[0])
    if ell == 0:
        theta2 = 0
    else:
        # Clamp to the valid asin domain, as the original did.
        asin_value = 2 * R / ell
        asin_value = max(asin_value, -1)
        asin_value = min(asin_value, 1)
        theta2 = theta - pi / 2 + asin(asin_value)
    if theta2 == 0:
        # Infeasible/degenerate geometry: huge length so this candidate never wins.
        L = pow(10.0, 8)
    else:
        L = sqrt(fabs(pow(ell, 2) - 4 * pow(R, 2))) + R * fmod(2 * pi + fmod(theta2, 2 * pi) - fmod(anglstart - pi / 2, 2 * pi), 2 * pi) + R * fmod(2 * pi + fmod(theta2 + pi, 2 * pi) - fmod(anglend + pi / 2, 2 * pi), 2 * pi)
    return L
def computeOptimalRadius(zs=None, anglstart=None, ze=None, anglend=None, R_min=None, gamma_max=None, idx=None, k=None, hdist=None):
    # Compute Optimal Radius
    """Bisect for the turn radius R in [R_min, 2*R_min] such that the chosen
    Dubins path plus k full spiral turns climbs/descends exactly |hdist| at
    the maximum flight-path angle gamma_max.

    zs/ze: start/end positions (3-vectors); anglstart/anglend: headings.
    idx selects the planar Dubins case (1=RSR, 2=RSL, 3=LSR, 4=LSL).
    k: number of extra full turns; hdist: altitude difference to absorb.
    Returns the bisected radius R (tolerance 0.1 on the length error).

    NOTE(review): if idx is not in 1..4 no iteration runs and the initial
    midpoint 1.5*R_min is returned — presumably callers always pass 1..4;
    verify against DubinsAirplanePath.
    """
    # Bisection bracket: [R_min, 2*R_min], start at the midpoint.
    R1 = R_min
    R2 = 2 * R_min
    R = ( R1 + R2 ) / 2
    if idx == 1:
        error = 1
        while fabs( error ) > 0.1:
            # Recompute the two right-turn circle centers for the current R.
            crs = zs + R * np.dot( rotz( pi / 2 ),np.array( [cos(anglstart), sin(anglstart), 0] ).T )
            cre = ze + R * np.dot( rotz( pi / 2 ), np.array( [cos(anglend), sin(anglend), 0] ).T )
            L = computeDubinsRSR(R, crs, cre, anglstart, anglend)
            # NOTE(review): this branch scales the error as (length - hdist/tan),
            # while branches 2-4 use (length*tan - hdist).  Same sign/root since
            # tan(gamma_max) > 0, but the tolerance 0.1 is applied on different
            # scales — confirm this asymmetry is intentional.
            error = ( L + 2 * pi * k * R ) - fabs( hdist ) / tan( gamma_max )
            if error > 0:
                R2 = R
            else:
                R1 = R
            R= ( R1 + R2 ) / 2
    elif idx == 2:
        error = 1
        while fabs( error ) > 0.1:
            # Right-turn start circle, left-turn end circle (RSL case).
            crs = zs + R * np.dot( rotz( pi / 2 ),np.array( [cos(anglstart), sin(anglstart), 0] ).T )
            cle = ze + R * np.dot( rotz( -pi/2 ),np.array( [cos(anglend), sin(anglend), 0] ).T )
            L = computeDubinsRSL( R, crs, cle, anglstart, anglend )
            error = ( L + 2 * pi * k * R ) * tan( gamma_max ) - fabs( hdist )
            if error > 0:
                R2 = R
            else:
                R1 = R
            R = ( R1 + R2 ) / 2
    elif idx == 3:
        error = 1
        while fabs( error ) > 0.1:
            # Left-turn start circle, right-turn end circle (LSR case).
            cls = zs + R * np.dot( rotz( -pi / 2 ), np.array( [cos(anglstart), sin(anglstart), 0] ).T )
            cre = ze + R * np.dot( rotz( pi / 2 ), np.array( [cos(anglend), sin(anglend), 0] ).T )
            L = computeDubinsLSR( R, cls, cre, anglstart, anglend )
            error = ( L + 2 * pi * k * R ) * tan( gamma_max ) - fabs( hdist )
            if error > 0:
                R2 = R
            else:
                R1 = R
            R = ( R1 + R2 ) / 2
    elif idx == 4:
        error = 1
        while fabs( error ) > 0.1:
            # Both circles are left turns (LSL case).
            cls = zs + R * np.dot( rotz( -pi / 2 ), np.array( [cos(anglstart), sin(anglstart), 0] ).T )
            cle = ze + R * np.dot( rotz( -pi / 2 ), np.array( [cos(anglend), sin(anglend), 0] ).T )
            L = computeDubinsLSL( R, cls, cle, anglstart, anglend )
            error = ( L + 2 * pi * k * R ) * tan( gamma_max ) - fabs( hdist )
            if error > 0:
                R2 = R
            else:
                R1 = R
            R = ( R1 + R2 ) / 2
    return R
def MinTurnRadius_DubinsAirplane(V=None, phi_max=None):
    """Minimum turning radius R_min = V^2 / (g * tan(phi_max)).

    V is the airspeed and phi_max the maximum bank angle in radians.
    """
    g = 9.8065  # gravitational acceleration used by the original model
    return pow(V, 2) / (g * tan(phi_max))
def addSpiralBeginning(zs=None, anglstart=None, ze=None, anglend=None, R_min=None, gamma_max=None, idx=None, hdist=None):
    # Add Spiral in the Dubins Airplane Path beginning
    """Insert an extra turn (spiral) at the START of the Dubins path so the
    total path length matches the required altitude change |hdist| at climb
    angle gamma_max.

    The spiral angle psi is found by a damped bisection on [0, 2*pi]: instead
    of moving a bracket endpoint all the way to psi, each update blends it as
    (179*old + psi)/180, and the loop stops when the length error is < 0.001.

    idx selects the modified case (1=RLSR, 2=RLSL, 3=LRSR, 4=LRSL).
    Returns (zi, anglinter, L, ci, psii): intermediate position/heading after
    the inserted spiral, total path length, intermediate circle center, and
    the spiral angle.

    NOTE(review): if idx is not 1..4 the zero-initialized defaults below are
    returned unchanged — presumably callers always pass 1..4.
    """
    cli = np.zeros((3,1))
    cri = np.zeros((3,1))
    zi = np.zeros((3,1))
    anglinter = 0
    L = 0
    ci = np.zeros((3,1))
    psii = 0
    # Bisection bracket for the spiral angle.
    psi1 = 0
    psi2 = 2 * pi
    psi = ( psi1 + psi2 ) / 2
    if idx == 1: # RLSR
        crs = zs + R_min * np.dot( rotz( pi/2 ), np.array( [cos(anglstart), sin(anglstart), 0] ).T )
        cre = ze + R_min * np.dot( rotz( pi/2 ), np.array( [cos(anglend), sin(anglend), 0] ).T )
        L = computeDubinsRSR( R_min, crs, cre, anglstart, anglend )
        error = L - fabs( hdist / tan( gamma_max ) )
        while fabs( error ) > 0.001:
            # Advance the start point around the right-turn circle by psi.
            zi = crs + np.dot( rotz( psi ),( zs-crs ) )
            anglinter = anglstart + psi
            cli = zi + R_min * np.dot( rotz( -pi/2 ), np.array( [cos(anglinter), sin(anglinter), 0] ).T )
            L = computeDubinsLSR( R_min, cli, cre, anglinter, anglend )
            error = ( L + fabs( psi ) * R_min ) - fabs( hdist / tan( gamma_max ) )
            if error > 0:
                psi2 = (179*psi2+psi)/180
            else:
                psi1 = (179*psi1+psi)/180
            psi = ( psi1 + psi2 ) / 2
        zi = crs + np.dot( rotz( psi ), ( zs-crs ) )
        anglinter = anglstart + psi
        L = L + fabs( psi ) * R_min
        ci = cli
        psii = psi
    elif idx == 2: # RLSL
        crs = zs + R_min * np.dot( rotz( pi / 2 ), np.array( [cos(anglstart), sin(anglstart), 0] ).T )
        cle = ze + R_min * np.dot( rotz( -pi / 2 ), np.array( [cos(anglend), sin(anglend), 0] ).T )
        L = computeDubinsRSL( R_min, crs, cle, anglstart, anglend )
        error = L - fabs( hdist / tan( gamma_max ) )
        while fabs( error ) > 0.001:
            zi = crs + np.dot( rotz( psi ), ( zs-crs ) )
            anglinter = anglstart + psi
            cli = zi + R_min * np.dot( rotz( -pi / 2 ), np.array( [cos(anglinter), sin(anglinter), 0] ).T )
            L = computeDubinsLSL( R_min, cli, cle, anglinter, anglend )
            error = ( L + fabs( psi ) * R_min ) - fabs( hdist / tan( gamma_max ) )
            if error > 0:
                psi2 = (179*psi2+psi)/180
            else:
                psi1 = (179*psi1+psi)/180
            psi = ( psi1 + psi2 ) / 2
        zi = crs + np.dot( rotz( psi ), ( zs-crs ) )
        anglinter = anglstart + psi
        L = L + fabs( psi ) * R_min
        ci = cli
        psii = psi
    elif idx == 3: # LRSR
        cls = zs + R_min * np.dot( rotz( -pi/2 ), np.array( [cos(anglstart), sin(anglstart), 0] ).T )
        cre = ze + R_min * np.dot( rotz( pi/2 ), np.array( [cos(anglend), sin(anglend), 0] ).T )
        L = computeDubinsLSR( R_min, cls, cre, anglstart, anglend )
        error = L - fabs( hdist / tan( gamma_max ) )
        while fabs( error ) > 0.001:
            # Left start circle: rotate by -psi (counter-clockwise case).
            zi = cls + np.dot( rotz( -psi ), ( zs-cls ) )
            anglinter = anglstart - psi
            cri = zi + R_min * np.dot( rotz( pi / 2 ), np.array( [cos(anglinter), sin(anglinter), 0] ).T )
            L = computeDubinsRSR( R_min, cri, cre, anglinter, anglend )
            error = ( L + fabs( psi ) * R_min ) - fabs( hdist / tan( gamma_max ) )
            if error > 0:
                psi2 = (179*psi2+psi)/180
            else:
                psi1 = (179*psi1+psi)/180
            psi = ( psi1 + psi2 ) / 2
        zi = cls + np.dot( rotz( -psi ), ( zs-cls ) )
        anglinter = anglstart - psi
        L = L + fabs( psi ) * R_min
        ci = cri
        psii = psi
    elif idx == 4: # LRSL
        cls = zs + R_min * np.dot( rotz( -pi/2 ), np.array( [cos(anglstart), sin(anglstart), 0] ).T )
        cle = ze + R_min * np.dot( rotz( -pi/2 ), np.array( [cos(anglend), sin(anglend), 0] ).T )
        # above modified by liucz 2015-10-12, fix spell mistake cre -> cle
        # origin is "cre = ze + R_min * np.dot( rotz( -pi/2 ), np.array( [cos(anglend), sin(anglend), 0] ).T )"
        L = computeDubinsLSL( R_min, cls, cle, anglstart, anglend )
        error = L - fabs( hdist / tan( gamma_max ) )
        while fabs( error ) > 0.001:
            zi = cls + np.dot( rotz( -psi ), ( zs-cls ) )
            anglinter = anglstart - psi
            cri = zi + R_min * np.dot( rotz( pi / 2 ), np.array( [cos(anglinter), sin(anglinter), 0 ] ).T )
            # above is modified by licz 2015-10-12, fix written mistake np.array -> np.dot
            # origin is "cri = zi + R_min * np.array( rotz( pi / 2 ), np.array( [cos(anglinter), sin(anglinter), 0 ] ).T )"
            L = computeDubinsRSL( R_min, cri, cle, anglinter, anglend )
            error = ( L + fabs( psi ) * R_min ) - fabs( hdist / tan( gamma_max) )
            if error > 0:
                psi2 = (179*psi2+psi)/180
            else:
                psi1 = (179*psi1+psi)/180
            psi = ( psi1 + psi2 ) / 2
        zi = cls + np.dot( rotz( -psi ), ( zs-cls ) )
        anglinter = anglstart - psi
        L = L + fabs( psi ) * R_min
        ci = cri
        psii = psi
    return zi, anglinter, L, ci, psii
def addSpiralEnd(zs=None, anglstart=None, ze=None, anglend=None, R_min=None, gamma_max=None, idx=None, hdist=None):
    # Add Spiral at the end of the Dubins Airplane path
    """Insert an extra turn (spiral) at the END of the Dubins path so the
    total path length matches the required altitude change |hdist| at climb
    angle gamma_max.  Mirror image of addSpiralBeginning: the spiral angle
    psi is found by the same damped bisection on [0, 2*pi], blending the
    bracket endpoint as (179*old + psi)/180 until the error is < 0.001.

    idx selects the modified case (1=RSLR, 2=RSRL, 3=LSLR, 4=LSRL).
    Returns (zi, anglinter, L, ci, psii): intermediate position/heading before
    the appended spiral, total path length, intermediate circle center, and
    the spiral angle.

    NOTE(review): if idx is not 1..4 the zero-initialized defaults below are
    returned unchanged — presumably callers always pass 1..4.
    """
    cli = np.zeros((3,1))
    cri = np.zeros((3,1))
    zi = np.zeros((3,1))
    anglinter = 0
    L = 0
    ci = np.zeros((3,1))
    psii = 0
    # Bisection bracket for the spiral angle.
    psi1 = 0
    psi2 = 2 * pi
    psi = ( psi1 + psi2 ) / 2
    if idx == 1: # RSLR
        crs = zs + R_min * np.dot( rotz( pi / 2 ), np.array( [cos(anglstart), sin(anglstart), 0] ).T )
        cre = ze + R_min * np.dot( rotz( pi/2 ), np.array( [cos(anglend), sin(anglend), 0] ).T )
        L = computeDubinsRSR( R_min, crs, cre, anglstart, anglend )
        error = L - fabs( hdist / tan( gamma_max ) )
        while fabs( error ) > 0.001:
            # Back the end point up around the right-turn circle by psi.
            zi = cre + np.dot( rotz( -psi ), ( ze-cre ) )
            anglinter = anglend - psi
            cli = zi + R_min * np.dot( rotz( -pi / 2 ), np.array( [cos(anglinter), sin(anglinter), 0] ).T )
            L = computeDubinsRSL( R_min, crs, cli, anglstart, anglinter )
            error = ( L + fabs( psi ) * R_min ) - fabs( hdist / tan( gamma_max ) )
            if error > 0:
                psi2 = (179*psi2+psi)/180
            else:
                psi1 = (179*psi1+psi)/180
            psi = ( psi1 + psi2 ) / 2
        zi = cre + np.dot( rotz( -psi ), ( ze-cre ) )
        anglinter = anglend - psi
        L = L + abs( psi ) * R_min
        ci = cli
        psii = psi
    elif idx == 2: # RSRL
        crs = zs + R_min * np.dot( rotz( pi / 2 ), np.array( [cos(anglstart), sin(anglstart), 0] ).T )
        cle = ze + R_min * np.dot( rotz( -pi/2 ), np.array( [cos(anglend), sin(anglend), 0] ).T )
        L = computeDubinsRSL( R_min, crs, cle, anglstart, anglend )
        error = L - fabs( hdist / tan( gamma_max ) )
        while fabs( error ) > 0.001:
            zi = cle + np.dot( rotz( psi ), ( ze-cle ) )
            anglinter = anglend + psi
            cri = zi + R_min * np.dot( rotz( pi / 2 ), np.array( [cos(anglinter), sin(anglinter), 0] ).T )
            L = computeDubinsRSR( R_min, crs, cri, anglstart, anglinter )
            error = ( L + fabs( psi ) * R_min ) - fabs( hdist / tan( gamma_max ) )
            if error > 0:
                psi2 = (179*psi2+psi)/180
            else:
                psi1 = (179*psi1+psi)/180
            psi = ( psi1 + psi2 ) / 2
        zi = cle + np.dot( rotz( psi ), ( ze-cle ) )
        anglinter = anglend + psi
        cri = zi + R_min * np.dot( rotz( pi / 2 ), np.array( [cos(anglinter), sin(anglinter), 0] ).T )
        L = L + fabs( psi ) * R_min
        ci = cri
        psii = psi
    elif idx == 3: # LSLR
        cls = zs + R_min * np.dot( rotz( -pi / 2 ), np.array( [cos(anglstart), sin(anglstart), 0] ).T )
        cre = ze + R_min * np.dot( rotz( pi / 2 ), np.array( [cos(anglend), sin(anglend), 0] ).T )
        L = computeDubinsLSR( R_min, cls, cre, anglstart, anglend )
        error = L - fabs( hdist / tan( gamma_max ))
        while fabs( error ) > 0.001:
            zi = cre + np.dot( rotz( -psi ), ( ze-cre ) )
            anglinter = anglend - psi
            cli = zi + R_min * np.dot( rotz( -pi / 2 ), np.array( [cos(anglinter), sin(anglinter), 0] ).T )
            L = computeDubinsLSL( R_min, cls, cli, anglstart, anglinter )
            error = ( L + fabs( psi ) * R_min ) - fabs( hdist / tan( gamma_max ) )
            if error > 0:
                psi2 = (179*psi2+psi)/180
            else:
                psi1 = (179*psi1+psi)/180
            psi = ( psi1 + psi2 ) / 2
        zi = cre + np.dot( rotz( -psi ), ( ze-cre ) )
        anglinter = anglend - psi
        L = L + fabs( psi ) * R_min
        ci = cli
        psii = psi
    elif idx == 4:
        # LSRL case (no trailing comment in the original).
        cls = zs + R_min * np.dot( rotz( -pi/2 ), np.array( [cos(anglstart), sin(anglstart), 0] ).T )
        cle = ze + R_min * np.dot( rotz( -pi/2 ), np.array( [cos(anglend), sin(anglend), 0] ).T )
        L = computeDubinsLSL( R_min, cls, cle, anglstart, anglend )
        error = L - fabs( hdist / tan( gamma_max ) )
        while fabs( error ) > 0.001:
            zi = cle + np.dot( rotz( psi ), ( ze-cle ) )
            anglinter = anglend + psi
            cri = zi + R_min * np.dot( rotz( pi / 2 ), np.array( [cos(anglinter), sin(anglinter), 0] ).T )
            L = computeDubinsLSR( R_min, cls, cri, anglstart, anglinter )
            error = ( L + fabs( psi ) * R_min ) - fabs( hdist / tan( gamma_max ) )
            if error > 0:
                psi2 = (179*psi2+psi)/180
            else:
                psi1 = (179*psi1+psi)/180
            psi = ( psi1 + psi2 ) / 2
        zi = cle + np.dot( rotz( psi ), ( ze-cle ))
        anglinter = anglend + psi
        L = L + fabs( psi ) * R_min
        ci = cri
        psii = psi
    return zi, anglinter, L, ci, psii
def DubinsAirplanePath(init_conf=None, final_conf=None, R_min=None, gamma_max=None):
# Compute the Dubins Airplane path
zs = (init_conf[0:3]).T
anglstart = init_conf[3]
ze = (final_conf[0:3]).T
anglend = final_conf[3]
DubinsAirplaneSolution['p_s'] = zs
DubinsAirplaneSolution['angl_s'] = anglstart
DubinsAirplaneSolution['p_e'] = ze
DubinsAirplaneSolution['angl_e'] = anglend
crs = zs + R_min*np.dot(rotz(pi/2), np.array([cos(anglstart), sin(anglstart), 0]).T)
cls = zs + R_min*np.dot(rotz(-pi/2),np.array([cos(anglstart), sin(anglstart), 0]).T)
cre = ze + R_min*np.dot(rotz(pi/2),np.array([cos(anglend), sin(anglend), 0]).T)
cle = ze + R_min*np.dot(rotz(-pi/2),np.array([cos(anglend), sin(anglend), 0]).T)
# compute L1, L2, L3, L4
L1 = computeDubinsRSR(R_min, crs, cre, anglstart, anglend)
L2 = computeDubinsRSL(R_min, crs, cle, anglstart, anglend)
L3 = computeDubinsLSR(R_min, cls, cre, anglstart, anglend)
L4 = computeDubinsLSL(R_min, cls, cle, anglstart, anglend)
# L is the minimum distance
L = np.amin(np.array([L1, L2, L3, L4]))
idx = np.where(np.array([L1,L2,L3,L4])==L)[0][0] + 1
hdist = -(ze[2] - zs[2])
if fabs(hdist) <= L*tan(gamma_max):
gam = atan(hdist/L)
DubinsAirplaneSolution['case'] = 1
DubinsAirplaneSolution['R'] = R_min
DubinsAirplaneSolution['gamma'] = gam
DubinsAirplaneSolution['L'] = L/cos(gam)
DubinsAirplaneSolution['k_s'] = 0
DubinsAirplaneSolution['k_e'] = 0
elif fabs(hdist) >= (L+2*pi*R_min)*tan(gamma_max):
k = np.floor( (fabs(hdist)/tan(gamma_max) - L)/(2*pi*R_min))
if hdist >= 0:
DubinsAirplaneSolution['k_s'] = k
DubinsAirplaneSolution['k_e'] = 0
else:
DubinsAirplaneSolution['k_s'] = 0
DubinsAirplaneSolution['k_e'] = k
# find optimal turning radius
R = computeOptimalRadius(zs, anglstart, ze, anglend, R_min, gamma_max, idx, k, hdist)
# recompute the centers of spirals and Dubins path length with new R
crs = zs + R*np.dot(rotz(pi/2), np.array( [cos(anglstart), sin(anglstart), 0] ).T )
cls = zs + R*np.dot(rotz(-pi/2), np.array( [cos(anglstart), sin(anglstart), 0] ).T )
cre = ze + R*np.dot(rotz(pi/2), np.array( [cos(anglend), sin(anglend), 0] ).T )
cle = ze + R*np.dot(rotz(-pi/2), np.array( [cos(anglend), sin(anglend), 0] ).T )
if idx == 1:
L = computeDubinsRSR( R, crs, cre, anglstart, anglend )
elif idx == 2:
L = computeDubinsRSL( R, crs, cle, anglstart, anglend )
elif idx == 3:
L = computeDubinsLSR( R, cls, cre, anglstart, anglend )
elif idx == 4:
L = computeDubinsLSL( R, cls, cle, anglstart, anglend )
DubinsAirplaneSolution['case'] = 1
DubinsAirplaneSolution['R'] = R
gam = np.sign( hdist ) * gamma_max
DubinsAirplaneSolution['gamma'] = gam
DubinsAirplaneSolution['L'] = ( L + 2 * pi * k * R ) / cos( gamma_max )
else:
gam = np.sign( hdist ) * gamma_max
if hdist > 0:
zi, chii, L, ci, psii = addSpiralBeginning( zs, anglstart, ze, anglend, R_min, gam, idx, hdist )
DubinsAirplaneSolution['case'] = 2
else:
zi, chii, L, ci, psii = addSpiralEnd( zs, anglstart, ze, anglend, R_min, gam, idx, hdist )
DubinsAirplaneSolution['case'] = 3
DubinsAirplaneSolution['R'] = R_min
DubinsAirplaneSolution['gamma'] = gam
DubinsAirplaneSolution['L'] = L / cos( gamma_max )
e1 = np.array( [1, 0, 0] ).T
R = DubinsAirplaneSolution['R']
if np.isscalar(DubinsAirplaneSolution['case']):
pass
else:
print('### Error')
if DubinsAirplaneSolution['case'] == 1: # spiral-line-spiral
if idx == 1: # right-straight-right
theta = atan2( cre[1]-crs[1], cre[0]-crs[0])
dist1 = R*fmod(2*pi+fmod(theta-pi/2,2*pi)-fmod(anglstart-pi/2,2*pi),2*pi) + 2*pi*R*DubinsAirplaneSolution['k_s']
dist2 = R*fmod(2*pi+fmod(anglend-pi/2,2*pi)-fmod(theta-pi/2,2*pi),2*pi) + 2*pi*R*DubinsAirplaneSolution['k_e']
w1 = crs + DubinsAirplaneSolution['R']*np.dot(rotz(theta-pi/2),e1.T).T + np.array([0,0,-dist1*tan(gam)]).T
w2 = cre + DubinsAirplaneSolution['R']*np.dot(rotz(theta-pi/2),e1.T).T - np.array([0,0,-dist2*tan(gam)]).T
q1 = (w2-w1)/np.linalg.norm(w2-w1,ord=2) # direction of line
DubinsAirplaneSolution['c_s'] = crs
DubinsAirplaneSolution['psi_s'] = anglstart-pi/2
DubinsAirplaneSolution['lamda_s'] = 1
# end spiral
DubinsAirplaneSolution['c_e'] = cre-np.array([0,0,-dist2*tan(gam)])
DubinsAirplaneSolution['psi_e'] = theta-pi/2
DubinsAirplaneSolution['lamda_e'] = 1
# hyperplane H_s: switch from first spiral to line
DubinsAirplaneSolution['w_s'] = w1
DubinsAirplaneSolution['q_s'] = q1
# hyperplane H_l: switch from line to last spiral
DubinsAirplaneSolution['w_l'] = w2
DubinsAirplaneSolution['q_l'] = q1
# hyperplane H_e: end of Dubins path
DubinsAirplaneSolution['w_e'] = ze
DubinsAirplaneSolution['q_e'] = np.dot(rotz(anglend), np.array([1,0,0]).T)
elif idx == 2: # right-straight-left
ell = np.linalg.norm(cle[0:2] - crs[0:2],ord=2)
theta = atan2(cle[1]-crs[1], cle[0]-crs[0])
theta2 = theta - pi/2 + asin(2*R/ell)
dist1 = R*fmod(2*pi+fmod(theta2,2*pi)-fmod(anglstart-pi/2,2*pi),2*pi) + 2*pi*R*DubinsAirplaneSolution['k_s']
dist2 = R*fmod(2*pi+fmod(theta2+pi,2*pi)-fmod(anglend+pi/2,2*pi),2*pi) + 2*pi*R*DubinsAirplaneSolution['k_e']
w1 = crs + R*np.dot(rotz(theta2), e1.T).T + np.array([0, 0, -dist1*tan(gam)]).T
w2 = cle + R*np.dot(rotz(theta2+pi),e1.T).T - np.array([0,0,-dist2*tan(gam)]).T
q1 = (w2-w1)/np.linalg.norm(w2-w1,ord=2)
# start spiral
DubinsAirplaneSolution['c_s'] = crs
DubinsAirplaneSolution['psi_s'] = anglstart-pi/2
DubinsAirplaneSolution['lamda_s'] = 1
# end spiral
DubinsAirplaneSolution['c_e'] = cle - np.array([0,0,-dist2*tan(gam)]).T
DubinsAirplaneSolution['psi_e'] = theta2+pi
DubinsAirplaneSolution['lamda_e'] = -1
# hyperplane H_s: switch from first spiral to line
DubinsAirplaneSolution['w_s'] = w1
DubinsAirplaneSolution['q_s'] = q1
# hyperplane H_l: switch from line to end spiral
DubinsAirplaneSolution['w_l'] = w2
DubinsAirplaneSolution['q_l'] = q1
# hyperplane H_e: end of Dubins path
DubinsAirplaneSolution['w_e'] = ze
DubinsAirplaneSolution['q_e'] = np.dot(rotz(anglend),np.array([1,0,0]).T)
elif idx == 3: # left-straight-right
ell = np.linalg.norm(cre[0:2]-cls[0:2],ord=2)
theta = atan2( cre[1]-cls[1],cre[0]-cls[0])
theta2 = acos(2*R/ell)
dist1 = R*fmod(2*pi-fmod(theta+theta2,2*pi) + fmod(anglstart+pi/2,2*pi),2*pi) + 2*pi*R*DubinsAirplaneSolution['k_s']
dist2 = R*fmod(2*pi-fmod(theta+theta2-pi,2*pi)+fmod(anglend-pi/2,2*pi),2*pi) + 2*pi*R*DubinsAirplaneSolution['k_e']
w1 = cls + R*np.dot(rotz(theta+theta2),e1.T).T + np.array([0, 0, -dist1*tan(gam)]).T
w2 = cre + R*np.dot(rotz(-pi+theta+theta2),e1.T).T - np.array([0, 0, -dist2*tan(gam)]).T
q1 = (w2-w1)/np.linalg.norm(w2-w1,ord=2)
# start spiral
DubinsAirplaneSolution['c_s'] = cls
DubinsAirplaneSolution['psi_s'] = anglstart+pi/2
DubinsAirplaneSolution['lamda_s'] = -1
# end spiral
DubinsAirplaneSolution['c_e'] = cre - np.array([0,0,-dist2*tan(gam)]).T
DubinsAirplaneSolution['psi_e'] = fmod(theta+theta2-pi,2*pi)
DubinsAirplaneSolution['lamda_e'] = 1
# hyperplane H_s: switch from first spiral to line
DubinsAirplaneSolution['w_s'] = w1
DubinsAirplaneSolution['q_s'] = q1
# hyperplane H_l: switch from line to end spiral
DubinsAirplaneSolution['w_l'] = w2
DubinsAirplaneSolution['q_l'] = q1
# hyperplane H_e: end of Dubins path
DubinsAirplaneSolution['w_e'] = ze
DubinsAirplaneSolution['q_e'] = np.dot(rotz(anglend), np.array([1, 0, 0]).T)
elif idx ==4: # left-straight-left
theta = atan2(cle[1] -cls[1], cle[0] - cls[0])
dist1 = R*fmod(2*pi-fmod(theta+pi/2,2*pi)+fmod(anglstart+pi/2,2*pi),2*pi) + 2*pi*R*DubinsAirplaneSolution['k_s']
dist2 = R*fmod(2*pi-fmod(anglend+pi/2,2*pi)+fmod(theta+pi/2,2*pi),2*pi) + 2*pi*R*DubinsAirplaneSolution['k_e']
w1 = cls + DubinsAirplaneSolution['R']*np.dot(rotz(theta+pi/2),e1.T).T + np.array([0,0,-dist1*tan(gam)]).T
w2 = cle + DubinsAirplaneSolution['R']*np.dot(rotz(theta+pi/2),e1.T).T - np.array([0,0,-dist2*tan(gam)]).T
q1 = (w2-w1)/np.linalg.norm(w2-w1,ord=2)
# start spiral
DubinsAirplaneSolution['c_s'] = cls
DubinsAirplaneSolution['psi_s'] = anglstart+pi/2
DubinsAirplaneSolution['lamda_s'] = -1
# end spiral
DubinsAirplaneSolution['c_e'] = cle - np.array([0,0,-dist2*tan(gam)]).T
DubinsAirplaneSolution['psi_e'] = theta+pi/2
DubinsAirplaneSolution['lamda_e'] = -1
# hyperplane H_s: switch from first spiral to line
DubinsAirplaneSolution['w_s'] = w1
DubinsAirplaneSolution['q_s'] = q1
# hyperplane H_l: switch from line to end spiral
DubinsAirplaneSolution['w_l'] = w2
DubinsAirplaneSolution['q_l'] = q1
# hyperplane H_e: end of Dubins path
DubinsAirplaneSolution['w_e'] = ze
DubinsAirplaneSolution['q_e'] = np.dot(rotz(anglend), np.array([1,0,0]).T)
elif DubinsAirplaneSolution['case'] == 2:
if idx == 1: # right-left-straight-right
# start spiral
DubinsAirplaneSolution['c_s'] = crs
DubinsAirplaneSolution['psi_s'] = anglstart-pi/2
DubinsAirplaneSolution['lamda_s'] = 1
DubinsAirplaneSolution['k_s'] = 0
ell = np.linalg.norm(cre[0:2]-ci[0:2],ord=2)
theta = atan2(cre[1] - ci[1], cre[0] - ci[0])
theta2 = acos(2*R/ell)
dist1 = R_min*psii + R*fmod(2*pi-fmod(theta+theta2,2*pi) + fmod(chii+pi/2,2*pi),2*pi)
dist2 = R*fmod(2*pi-fmod(theta+theta2-pi,2*pi)+fmod(anglend-pi/2,2*pi),2*pi)
w1 = ci + R*np.dot(rotz(theta+theta2),e1.T).T + np.array([0, 0, -dist1*tan(gam)]).T
w2 = cre + R*np.dot(rotz(-pi+theta+theta2),e1.T).T - np.array([0, 0, -dist2*tan(gam)]).T
q1 = (w2-w1)/np.linalg.norm(w2-w1,ord=2)
# intermediate-start spiral
DubinsAirplaneSolution['c_si'] = ci + np.array([0, 0, -R_min*psii*tan(gam)]).T
DubinsAirplaneSolution['psi_si'] = chii + pi/2
DubinsAirplaneSolution['lamda_si'] = -1
DubinsAirplaneSolution['k_si'] = 0
# end spiral
DubinsAirplaneSolution['c_e'] = cre - np.array([0,0,-dist2*tan(gam)]).T
DubinsAirplaneSolution['psi_e'] = fmod(theta+theta2-pi,2*pi)
DubinsAirplaneSolution['lamda_e'] = 1
DubinsAirplaneSolution['k_e'] = 0
# hyperplane H_s: switch from first to second spiral
DubinsAirplaneSolution['w_s'] = zi - np.array([0, 0, -psii*R_min*tan(gam)]).T
DubinsAirplaneSolution['q_s'] = np.array([cos(chii), sin(chii), 0]).T
# hyperplane H_si: switch from second spiral to straight line
DubinsAirplaneSolution['w_si'] = w1
DubinsAirplaneSolution['q_si'] = q1
# hyperplane H_l: switch from straight-line to end spiral
DubinsAirplaneSolution['w_l'] = w2
DubinsAirplaneSolution['q_l'] = q1
# hyperplane H_e: end of Dubins path
DubinsAirplaneSolution['w_e'] = ze
DubinsAirplaneSolution['q_e'] = np.dot(rotz(anglend), np.array([1,0,0]).T)
elif idx == 2: # right-left-straight-left
theta = atan2(cle[1]-ci[1],cle[0]-ci[0])
dist1 = R*fmod(2*pi-fmod(theta+pi/2,2*pi)+fmod(chii+pi/2,2*pi),2*pi)
dist2 = psii*R
dist3 = R*fmod(2*pi-fmod(anglend+pi/2,2*pi)+fmod(theta+pi/2,2*pi),2*pi)
w1 = ci + DubinsAirplaneSolution['R']*np.dot(rotz(theta+pi/2),e1.T).T + np.array([0, 0, -(dist1+dist2)*tan(gam)]).T
w2 = cle + DubinsAirplaneSolution['R']*np.dot(rotz(theta+pi/2),e1.T).T - np.array([0,0,-dist3*tan(gam)]).T
q1 = (w2-w1)/np.linalg.norm(w2-w1,ord=2) # direction of line
# start spiral
DubinsAirplaneSolution['c_s'] = crs
DubinsAirplaneSolution['psi_s'] = anglstart-pi/2
DubinsAirplaneSolution['lamda_s'] = 1
DubinsAirplaneSolution['k_s'] = 0
# intermediate-start spiral
DubinsAirplaneSolution['c_si'] = ci + np.array([0, 0, -dist2*tan(gam)]).T
DubinsAirplaneSolution['psi_si'] = chii+pi/2
DubinsAirplaneSolution['lamda_si'] = -1
DubinsAirplaneSolution['k_si'] = 0
# end spiral
DubinsAirplaneSolution['c_e'] = cle - np.array([0, 0, -dist3*tan(gam)]).T
DubinsAirplaneSolution['psi_e'] = theta+pi/2
DubinsAirplaneSolution['lamda_e'] = -1
DubinsAirplaneSolution['k_e'] = 0
# hyperplane H_s: switch from first to second spiral
DubinsAirplaneSolution['w_s'] = zi - np.array([0, 0, -dist2*tan(gam)]).T
DubinsAirplaneSolution['q_s'] = np.array([cos(chii), sin(chii), 0]).T
# hyperplane H_si: switch from second spiral to straight line
DubinsAirplaneSolution['w_si'] = w1
DubinsAirplaneSolution['q_si'] = q1
# hyperplane H_l: switch from straight-line to end spiral
DubinsAirplaneSolution['w_l'] = w2
DubinsAirplaneSolution['q_l'] = q1
# hyperplane H_e: end of Dubins path
DubinsAirplaneSolution['w_e'] = ze
DubinsAirplaneSolution['q_e'] = np.dot(rotz(anglend), np.array([1, 0, 0]).T)
elif idx == 3: # left-right-straight-right
theta = atan2(cre[1]-ci[1], cre[0] - ci[0])
dist1 = R*fmod(2*pi+fmod(theta-pi/2,2*pi)-fmod(chii-pi/2,2*pi),2*pi)
dist2 = psii*R
dist3 = R*fmod(2*pi+fmod(anglend-pi/2,2*pi)-fmod(theta-pi/2,2*pi),2*pi)
w1 = ci + DubinsAirplaneSolution['R']*np.dot(rotz(theta-pi/2),e1.T).T + np.array([0, 0, -(dist1+dist2)*tan(gam)]).T
w2 = cre + DubinsAirplaneSolution['R']*np.dot(rotz(theta-pi/2),e1.T).T - np.array([0, 0, -dist3*tan(gam)]).T
q1 = (w2-w1)/np.linalg.norm(w2-w1,ord=2) # direction of line
# start spiral
DubinsAirplaneSolution['c_s'] = cls
DubinsAirplaneSolution['psi_s'] = anglstart+pi/2
DubinsAirplaneSolution['lamda_s'] = -1
DubinsAirplaneSolution['k_s'] = 0
# intermediate-start spiral
DubinsAirplaneSolution['c_si'] = ci + np.array([0, 0, -dist2*tan(gam)]).T
DubinsAirplaneSolution['psi_si'] = chii-pi/2
DubinsAirplaneSolution['lamda_si'] = 1
DubinsAirplaneSolution['k_si'] = 0
# end spiral
DubinsAirplaneSolution['c_e'] = cre - np.array([0, 0, -dist3*tan(gam)]).T
DubinsAirplaneSolution['psi_e'] = theta-pi/2
DubinsAirplaneSolution['lamda_e'] = 1
DubinsAirplaneSolution['k_e'] = 0
# hyperplane H_s: switch from first to second spiral
DubinsAirplaneSolution['w_s'] = zi - np.array([0, 0, -dist2*tan(gam)]).T
DubinsAirplaneSolution['q_s'] = np.array([cos(chii), sin(chii), 0]).T
# hyperplane H_si: switch from second spiral to straight line
DubinsAirplaneSolution['w_si'] = w1
DubinsAirplaneSolution['q_si'] = q1
# hyperplane H_l: switch from straight-line to end spiral
DubinsAirplaneSolution['w_l'] = w2
DubinsAirplaneSolution['q_l'] = q1
# hyperplane H_e: end of Dubins path
DubinsAirplaneSolution['w_e'] = ze
DubinsAirplaneSolution['q_e'] = np.dot(rotz(anglend), np.array([1,0,0]).T)
elif idx == 4: # left-right-straight-left
ell = np.linalg.norm(cle[0:2]-ci[0:2],ord=2)
theta = atan2(cle[1] - ci[1], cle[0] - ci[0])
theta2 = theta - pi/2 + asin(2*R/ell)
dist1 = R*fmod(2*pi+fmod(theta2,2*pi) - fmod(chii-pi/2,2*pi),2*pi)
dist2 = R*psii
dist3 = R*fmod(2*pi+fmod(theta2+pi,2*pi) - fmod(anglend+pi/2,2*pi),2*pi)
w1 = ci + R*np.dot(rotz(theta2),e1.T).T + np.array([0, 0, -(dist1+dist2)*tan(gam)]).T
w2 = cle + R*np.dot(rotz(theta2+pi),e1.T).T - np.array([0, 0, -dist3*tan(gam)]).T
q1 = (w2-w1)/np.linalg.norm(w2-w1,ord=2)
# start spiral
DubinsAirplaneSolution['c_s'] = cls
DubinsAirplaneSolution['psi_s'] = anglstart+pi/2
DubinsAirplaneSolution['lamda_s'] = -1
DubinsAirplaneSolution['k_s'] = 0
# intermediate-start spiral
DubinsAirplaneSolution['c_si'] = ci + np.array([0, 0, -dist2*tan(gam)]).T
DubinsAirplaneSolution['psi_si'] = chii-pi/2
DubinsAirplaneSolution['lamda_si'] = 1
DubinsAirplaneSolution['k_si'] = 0
# end spiral
DubinsAirplaneSolution['c_e'] = cle - np.array([0, 0, -dist3*tan(gam)]).T
DubinsAirplaneSolution['psi_e'] = theta2+pi
DubinsAirplaneSolution['lamda_e'] = -1
DubinsAirplaneSolution['k_e'] = 0
# hyperplane H_s: switch from first to second spiral
DubinsAirplaneSolution['w_s'] = zi - np.array([0, 0, -dist2*tan(gam)]).T
DubinsAirplaneSolution['q_s'] = np.array([cos(chii), sin(chii), 0]).T
# hyperplane H_si: switch from second spiral to straight line
DubinsAirplaneSolution['w_si'] = w1
DubinsAirplaneSolution['q_si'] = q1
# hyperplane H_l: switch from straight-line to end spiral
DubinsAirplaneSolution['w_l'] = w2
DubinsAirplaneSolution['q_l'] = q1
# hyperplane H_e: end of Dubins path
DubinsAirplaneSolution['w_e'] = ze
DubinsAirplaneSolution['q_e'] = np.dot(rotz(anglend), np.array([1, 0, 0]).T)
elif DubinsAirplaneSolution['case'] == 3:
if idx == 1: # right-straight-left-right
# path specific calculations
ell = np.linalg.norm(ci[0:2] - crs[0:2],ord=2)
theta = atan2(ci[1] - crs[1], ci[0] - crs[0])
theta2 = theta-pi/2 + asin(2*R/ell)
dist1 = R*fmod(2*pi+fmod(theta2,2*pi) - fmod(anglstart-pi/2,2*pi),2*pi)
dist2 = R*fmod(2*pi+fmod(theta2+pi,2*pi)-fmod(chii+pi/2,2*pi),2*pi)
dist3 = fabs(R_min*psii)
w1 = crs + R*np.dot(rotz(theta2),e1.T).T + np.array([0, 0, -dist1*tan(gam)]).T
w2 = ci + R*np.dot(rotz(theta2+pi),e1.T).T - np.array([0, 0, -(dist2+dist3)*tan(gam)]).T
q1 = (w2-w1)/np.linalg.norm(w2-w1,ord=2)
# start spiral
DubinsAirplaneSolution['c_s'] = crs
DubinsAirplaneSolution['psi_s'] = anglstart-pi/2
DubinsAirplaneSolution['lamda_s'] = 1
DubinsAirplaneSolution['k_s'] = 0
# intermediate-end spiral
DubinsAirplaneSolution['c_ei'] = ci - np.array([0, 0, -(dist2+dist3)*tan(gam)]).T
DubinsAirplaneSolution['psi_ei'] = theta2+pi
DubinsAirplaneSolution['lamda_ei'] = -1
DubinsAirplaneSolution['k_ei'] = 0
# end spiral
DubinsAirplaneSolution['c_e'] = cre - np.array([0, 0, -dist3*tan(gam)]).T
DubinsAirplaneSolution['psi_e'] = anglend-pi/2-psii
DubinsAirplaneSolution['lamda_e'] = 1
DubinsAirplaneSolution['k_e'] = 0
# hyperplane H_s: switch from first to second spiral
DubinsAirplaneSolution['w_s'] = w1
DubinsAirplaneSolution['q_s'] = q1
# hyperplane H_l: switch from straight-line to intermediate spiral
DubinsAirplaneSolution['w_l'] = w2
DubinsAirplaneSolution['q_l'] = q1
# hyperplane H_ei: switch from intermediate spiral to
# end spiral
DubinsAirplaneSolution['w_ei'] = zi - np.array([0, 0, -dist3*tan(gam)]).T
DubinsAirplaneSolution['q_ei'] = np.array([cos(chii), sin(chii), 0]).T
# hyperplane H_e: end of Dubins path
DubinsAirplaneSolution['w_e'] = ze
DubinsAirplaneSolution['q_e'] = np.dot(rotz(anglend),np.array([1,0,0]).T)
elif idx == 2: # right-straight-right-left
# path specific calculations
theta = atan2(ci[1] - crs[1], ci[0] - crs[0])
dist1 = R*fmod(2*pi+fmod(theta-pi/2,2*pi) - fmod(anglstart-pi/2,2*pi),2*pi)
dist2 = R*fmod(2*pi+fmod(chii-pi/2,2*pi) - fmod(theta-pi/2,2*pi),2*pi)
dist3 = fabs(R_min*psii)
w1 = crs + R*np.dot(rotz(theta-pi/2),e1.T).T + np.array([0, 0, -dist1*tan(gam)]).T
w2 = ci + R*np.dot(rotz(theta-pi/2),e1.T).T - np.array([0, 0, -(dist2+dist3)*tan(gam)]).T
q1 = (w2-w1)/np.linalg.norm(w2-w1,ord=2)
# start spiral
DubinsAirplaneSolution['c_s'] = crs
DubinsAirplaneSolution['psi_s'] = anglstart-pi/2
DubinsAirplaneSolution['lamda_s'] = 1
DubinsAirplaneSolution['k_s'] = 0
# intermediate-end spiral
DubinsAirplaneSolution['c_ei'] = ci - np.array([0, 0, -(dist2+dist3)*tan(gam)]).T
DubinsAirplaneSolution['psi_ei'] = theta - pi/2
DubinsAirplaneSolution['lamda_ei'] = 1
DubinsAirplaneSolution['k_ei'] = 0
# end spiral
DubinsAirplaneSolution['c_e'] = cle - np.array([0, 0, -dist3*tan(gam)]).T
DubinsAirplaneSolution['psi_e'] = anglend+pi/2+psii
DubinsAirplaneSolution['lamda_e'] = -1
DubinsAirplaneSolution['k_e'] = 0
# hyperplane H_s: switch from first to second spiral
DubinsAirplaneSolution['w_s'] = w1
DubinsAirplaneSolution['q_s'] = q1
# hyperplane H_l: switch from straight-line to intermediate spiral
DubinsAirplaneSolution['w_l'] = w2
DubinsAirplaneSolution['q_l'] = q1
# hyperplane H_ei: switch from intermediate spiral to
# end spiral
DubinsAirplaneSolution['w_ei'] = zi - np.array([0, 0, -dist3*tan(gam)]).T
DubinsAirplaneSolution['q_ei'] = np.array([cos(chii), sin(chii), 0]).T
# hyperplane H_e: end of Dubins path
DubinsAirplaneSolution['w_e'] = ze
DubinsAirplaneSolution['q_e'] = np.dot(rotz(anglend), np.array([1, 0, 0]).T)
elif idx == 3: # left-straight-left-right
# path specific calculations
theta = atan2(ci[1]-cls[1],ci[0]-cls[0])
dist1 = R*fmod(2*pi-fmod(theta+pi/2,2*pi)+fmod(anglstart+pi/2,2*pi),2*pi)
dist2 = R*fmod(2*pi-fmod(chii+pi/2,2*pi)+fmod(theta+pi/2,2*pi),2*pi)
dist3 = fabs(R_min*psii)
w1 = cls + DubinsAirplaneSolution['R']*np.dot(rotz(theta+pi/2),e1.T).T + np.array([0, 0, -dist1*tan(gam)]).T
w2 = ci + DubinsAirplaneSolution['R']*np.dot(rotz(theta+pi/2),e1.T).T - np.array([0, 0, -(dist2+dist3)*tan(gam)]).T
q1 = (w2-w1)/np.linalg.norm(w2-w1,ord=2) # direction of line
# start spiral
DubinsAirplaneSolution['c_s'] = cls
DubinsAirplaneSolution['psi_s'] = anglstart+pi/2
DubinsAirplaneSolution['lamda_s'] = -1
DubinsAirplaneSolution['k_s'] = 0
# intermediate-end spiral
DubinsAirplaneSolution['c_ei'] = ci - np.array([0, 0, -(dist2+dist3)*tan(gam)]).T
DubinsAirplaneSolution['psi_ei'] = theta+pi/2
DubinsAirplaneSolution['lamda_ei'] = -1
DubinsAirplaneSolution['k_ei'] = 0
# end spiral
DubinsAirplaneSolution['c_e'] = cre - np.array([0, 0, -dist3*tan(gam)]).T
DubinsAirplaneSolution['psi_e'] = anglend-pi/2-psii
DubinsAirplaneSolution['lamda_e'] = 1
DubinsAirplaneSolution['k_e'] = 0
# hyperplane H_s: switch from first to second spiral
DubinsAirplaneSolution['w_s'] = w1
DubinsAirplaneSolution['q_s'] = q1
# hyperplane H_l: switch from straight-line to intermediate spiral
DubinsAirplaneSolution['w_l'] = w2
DubinsAirplaneSolution['q_l'] = q1
# hyperplane H_ei: switch from intermediate spiral to
# end spiral
DubinsAirplaneSolution['w_ei'] = zi - np.array([0, 0, -dist3*tan(gam)]).T
DubinsAirplaneSolution['q_ei'] = np.array([cos(chii), sin(chii), 0]).T
# hyperplane H_e: end of Dubins path
DubinsAirplaneSolution['w_e'] = ze
DubinsAirplaneSolution['q_e'] = np.dot(rotz(anglend),np.array([1,0,0]).T)
elif idx == 4: # left-straight-right-left
# path specific calculations
ell = np.linalg.norm(ci[0:2] - cls[0:2],ord=2)
theta = atan2( ci[1] - cls[1], ci[0] - cls[0])
theta2 = acos(2*R/ell)
dist1 = R*fmod(2*pi-fmod(theta+theta2,2*pi) + fmod(anglstart+pi/2,2*pi),2*pi)
dist2 = R*fmod(2*pi-fmod(theta+theta2-pi,2*pi)+fmod(chii-pi/2,2*pi),2*pi)
dist3 = fabs(R_min*psii)
w1 = cls + R*np.dot(rotz(theta+theta2),e1.T).T + np.array([0, 0, -dist1*tan(gam)]).T
w2 = ci + R*np.dot(rotz(-pi+theta+theta2),e1.T).T - np.array([0, 0, -(dist2+dist3)*tan(gam)]).T
q1 = (w2-w1)/np.linalg.norm(w2-w1,ord=2)
# start spiral
DubinsAirplaneSolution['c_s'] = cls
DubinsAirplaneSolution['psi_s'] = anglstart+pi/2
DubinsAirplaneSolution['lamda_s'] = -1
DubinsAirplaneSolution['k_s'] = 0
# intermediate-end spiral
DubinsAirplaneSolution['c_ei'] = ci - np.array([0, 0, -(dist2+dist3)*tan(gam)]).T
DubinsAirplaneSolution['psi_ei'] = fmod(theta+theta2-pi,2*pi)
DubinsAirplaneSolution['lamda_ei'] = 1
DubinsAirplaneSolution['k_ei'] = 0
# end spiral
DubinsAirplaneSolution['c_e'] = cle - np.array([0, 0, -dist3*tan(gam)]).T
DubinsAirplaneSolution['psi_e'] = anglend+pi/2+psii
DubinsAirplaneSolution['lamda_e'] = -1
DubinsAirplaneSolution['k_e'] = 0
# hyperplane H_s: switch from first to second spiral
DubinsAirplaneSolution['w_s'] = w1
DubinsAirplaneSolution['q_s'] = q1
# hyperplane H_l: switch from straight-line to intermediate spiral
DubinsAirplaneSolution['w_l'] = w2
DubinsAirplaneSolution['q_l'] = q1
# hyperplane H_ei: switch from intermediate spiral to
# end spiral
DubinsAirplaneSolution['w_ei'] = zi - np.array([0, 0, -dist3*tan(gam)]).T
DubinsAirplaneSolution['q_ei'] = np.array([cos(chii), sin(chii), 0]).T
# hyperplane H_e: end of Dubins path
DubinsAirplaneSolution['w_e'] = ze
DubinsAirplaneSolution['q_e'] = np.dot(rotz(anglend), np.array([1, 0, 0]).T)
if DubinsAirplaneSolution['case'] == 4:
print('### Not Implemented Case')
return DubinsAirplaneSolution
def drawline(w1=None, q1=None, w2=None, q2=None, step=None):
    """Sample points along the straight-line segment of a Dubins path.

    Starting from ``w1``, points ``w1 + s*q1`` are appended (with ``s``
    growing by ``step``) until the most recent point crosses the half
    plane defined by the point ``w2`` and normal direction ``q2``, i.e.
    until ``dot(last_col - w2, q2) > 0``.

    Parameters
    ----------
    w1 : np.ndarray
        Start point of the line (3-vector; presumably shape (3,) or (3,1)
        — TODO confirm against callers).
    q1 : np.ndarray
        Unit direction of the line (3-vector).
    w2 : np.ndarray
        A point on the terminating half plane.
    q2 : np.ndarray
        Normal of the terminating half plane.
    step : float
        Path-length increment between consecutive samples.

    Returns
    -------
    np.ndarray
        A (3, N) array whose columns are the sampled points.

    Notes
    -----
    Side effects: this function mutates the ``shape`` attribute of the
    caller's ``w1`` and ``q1`` arrays in place (``w1.shape = (3,1)``,
    ``q1.shape = (3,1)``), and ``r`` initially aliases ``w1``.
    """
    # extract line path
    r = w1  # NOTE: alias — r and w1 refer to the same array until the first hstack rebinds r
    # propagate line until cross half plane
    s = 0
    NrNc = r.shape
    if len(NrNc) == 1:
        # 1-D input: the whole vector is the (only) current column
        NrNc_ind = NrNc[0]
        last_col = r[:]
    else:
        # 2-D input: take the last column as the current point
        NrNc_ind = NrNc[1]
        last_col = r[:,NrNc[1]-1]
    # force column-vector shape so np.hstack below stacks columns
    r.shape = (3,1)
    while np.dot( (last_col - w2).T,q2 ) <= 0:
        s = s + step
        # reshape inputs in place to column vectors (side effect on caller's arrays)
        w1.shape = (3,1)
        q1.shape = (3,1)
        new_col = w1+s*q1
        new_col.shape = (3,1)
        r = np.hstack( (r, new_col) )
        # refresh the "last point" for the half-plane test
        NrNc = r.shape
        if len(NrNc) == 1:
            NrNc_ind = NrNc[0]
            last_col = r[:]
        else:
            NrNc_ind = NrNc[1]
            last_col = r[:,NrNc[1]-1]
    return r
def drawspiral(R=None, gam=None, c=None, psi=None, lam=None, k=None, w=None, q=None, step=None):
    """Sample points along a spiral (helical arc) segment of a Dubins path.

    Points ``c + R*[cos(lam*s+psi), sin(lam*s+psi), -s*tan(gam)]`` are
    appended (with ``s`` growing by ``step``) until the path has crossed
    the half plane defined by point ``w`` and normal ``q`` the required
    number of times (each sign change of the half-plane test counts as
    one crossing).

    Parameters
    ----------
    R : float
        Turn radius of the spiral.
    gam : float
        Flight-path (climb) angle; the z-coordinate changes by
        ``-s*tan(gam)`` along the arc.
    c : np.ndarray
        Spiral center (3-vector).
    psi : float
        Start angle on the spiral.
    lam : int
        Turn direction, +1 or -1 (presumably right/left — TODO confirm
        with the solver's ``lamda_*`` convention).
    k : int
        Number of full extra turns; sets the required crossing count.
    w : np.ndarray
        A point on the terminating half plane.
    q : np.ndarray
        Normal of the terminating half plane.
    step : float
        Angle increment between consecutive samples.

    Returns
    -------
    np.ndarray
        A (3, N) array whose columns are the sampled points.
    """
    # extract spiral path
    r = np.zeros((1,1))
    # first sample: point on the circle of radius R at angle psi around c
    r = c.T + R*np.array( [cos(psi), sin(psi), 0] ).T
    r = r.T
    # determine number of required crossings of half plane
    NrNc = r.shape
    if len(NrNc) ==1 :
        NrNc_ind = NrNc[0]
        halfplane = np.dot( (r[0:2]-w[0:2].T),q[0:2] )
    else:
        NrNc_ind = NrNc[1]
        halfplane = np.dot( (r[0:2,NrNc_ind-1]-w[0:2].T),q[0:2] )
    if (halfplane > 0).all() :
        # starting already past the plane: need an extra pair of crossings
        required_crossing = 2 * ( k + 1 )
    else:
        required_crossing = 2 * k + 1
    # propagate spiral until cross half plane the right number of times
    s = 0
    # force column-vector shape so np.hstack below stacks columns
    r.shape = (3,1)
    while ( required_crossing > 0 ) or ( (halfplane <= 0).all() ):
        s = s +step
        # next helix sample; z descends by s*tan(gam) as the arc progresses
        new_col = (c + R * np.array( [ cos(lam*s+psi), sin(lam*s+psi), -s*tan(gam)] ).T )
        new_col.shape = (3,1)
        r = np.hstack( (r, new_col) )
        NrNc = r.shape
        if len(NrNc)==1 :
            NrNc_ind = NrNc[0]
            # a sign change of the half-plane test marks one crossing
            if np.sign( halfplane ) != np.sign( np.dot((r[0:2]-w[0:2].T),q[0:2]) ):
                halfplane = np.dot( ( r[0:2] - w[0:2].T ), q[0:2] )
                required_crossing = required_crossing - 1
        else:
            NrNc_ind = NrNc[1]
            if np.sign(halfplane) != np.sign( np.dot( (r[0:2,NrNc_ind-1]-w[0:2].T ), q[0:2]) ):
                halfplane = np.dot( (r[0:2,NrNc_ind-1] - w[0:2] ).T, q[0:2] )
                required_crossing = required_crossing - 1
    return r
def ExtractDubinsAirplanePath(DubinsAirplaneSolutions=None, step=0.01):
    """Extract the Dubins airplane solution as a sampled 3xN point array.

    Depending on the solution's ``case``, the path is the concatenation of:
      case 1: spiral - line - spiral
      case 2: spiral - spiral - line - spiral
      case 3: spiral - line - spiral - spiral

    Parameters
    ----------
    DubinsAirplaneSolutions : dict
        Solution dictionary produced by the Dubins airplane solver,
        holding 'case', 'R', 'gamma' and the per-segment keys
        ('c_*', 'psi_*', 'lamda_*', 'k_*', 'w_*', 'q_*').
    step : float, optional
        Sampling increment forwarded to drawspiral / drawline.

    Returns
    -------
    np.ndarray
        A (3, N) array whose columns are the sampled path points.

    Raises
    ------
    ValueError
        If ``DubinsAirplaneSolutions['case']`` is not 1, 2 or 3.
        (The original code crashed with UnboundLocalError instead.)
    """
    sol = DubinsAirplaneSolutions

    def _spiral(suffix):
        # Draw one spiral segment identified by its solution-key suffix
        # ('s' start, 'si' intermediate-start, 'ei' intermediate-end, 'e' end).
        return drawspiral(sol['R'], sol['gamma'],
                          sol['c_' + suffix], sol['psi_' + suffix],
                          sol['lamda_' + suffix], sol['k_' + suffix],
                          sol['w_' + suffix], sol['q_' + suffix], step)

    case = sol['case']
    if case == 1:  # spiral - line - spiral
        segments = [_spiral('s'),
                    drawline(sol['w_s'], sol['q_s'], sol['w_l'], sol['q_l'], step),
                    _spiral('e')]
    elif case == 2:  # spiral - spiral - line - spiral
        segments = [_spiral('s'),
                    _spiral('si'),
                    drawline(sol['w_si'], sol['q_si'], sol['w_l'], sol['q_l'], step),
                    _spiral('e')]
    elif case == 3:  # spiral - line - spiral - spiral
        segments = [_spiral('s'),
                    drawline(sol['w_s'], sol['q_s'], sol['w_l'], sol['q_l'], step),
                    _spiral('ei'),
                    _spiral('e')]
    else:
        # Fail loudly with context rather than the original UnboundLocalError on 'r'.
        raise ValueError('Unsupported Dubins airplane case: %r' % (case,))

    # Single concatenation instead of the original chain of pairwise hstacks.
    return np.hstack(segments)
| 48.875236 | 292 | 0.555115 | 6,887 | 51,710 | 4.089153 | 0.037462 | 0.019601 | 0.03004 | 0.016618 | 0.876607 | 0.862794 | 0.831013 | 0.805021 | 0.802926 | 0.78961 | 0 | 0.042411 | 0.291858 | 51,710 | 1,057 | 293 | 48.921476 | 0.726664 | 0.088126 | 0 | 0.752741 | 0 | 0 | 0.031147 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017052 | false | 0.001218 | 0.003654 | 0 | 0.037759 | 0.002436 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
cde469be6c1edcec4fcf71d36a9bbbb219e47351 | 42,837 | py | Python | CondFormats/PCLConfig/python/ThresholdsHG_cff.py | menglu21/cmssw | c3d6cb102c0aaddf652805743370c28044d53da6 | [
"Apache-2.0"
] | null | null | null | CondFormats/PCLConfig/python/ThresholdsHG_cff.py | menglu21/cmssw | c3d6cb102c0aaddf652805743370c28044d53da6 | [
"Apache-2.0"
] | null | null | null | CondFormats/PCLConfig/python/ThresholdsHG_cff.py | menglu21/cmssw | c3d6cb102c0aaddf652805743370c28044d53da6 | [
"Apache-2.0"
] | null | null | null | import FWCore.ParameterSet.Config as cms
import copy
# -----------------------------------------------------------------------
# Default configuration
default = cms.VPSet(
#### Barrel Pixel HB X-
cms.PSet(alignableId = cms.string("TPBHalfBarrelXminus"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPBHalfBarrelXminus"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPBHalfBarrelXminus"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPBHalfBarrelXminus"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPBHalfBarrelXminus"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPBHalfBarrelXminus"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
### Barrel Pixel HB X+
cms.PSet(alignableId = cms.string("TPBHalfBarrelXplus"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPBHalfBarrelXplus"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0),
),
cms.PSet(alignableId = cms.string("TPBHalfBarrelXplus"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPBHalfBarrelXplus"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPBHalfBarrelXplus"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPBHalfBarrelXplus"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
### Forward Pixel HC X-,Z-
cms.PSet(alignableId = cms.string("TPEHalfCylinderXminusZminus"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXminusZminus"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXminusZminus"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXminusZminus"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXminusZminus"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXminusZminus"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
### Forward Pixel HC X+,Z-
cms.PSet(alignableId = cms.string("TPEHalfCylinderXplusZminus"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXplusZminus"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXplusZminus"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXplusZminus"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXplusZminus"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXplusZminus"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
### Forward Pixel HC X-,Z+
cms.PSet(alignableId = cms.string("TPEHalfCylinderXminusZplus"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXminusZplus"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXminusZplus"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXminusZplus"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXminusZplus"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXminusZplus"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
### Forward Pixel HC X+,Z+
cms.PSet(alignableId = cms.string("TPEHalfCylinderXplusZplus"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXplusZplus"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXplusZplus"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXplusZplus"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXplusZplus"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
cms.PSet(alignableId = cms.string("TPEHalfCylinderXplusZplus"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
maxMoveCut = cms.double(200.0),
maxErrorCut = cms.double(10.0)
),
### Barrel Pixel Ladder
cms.PSet(alignableId = cms.string("TPBLadder"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadder"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadder"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadder"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadder"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadder"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
### EndCap Pixel Panel
cms.PSet(alignableId = cms.string("TPEPanel"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanel"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanel"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanel"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanel"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanel"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
### Barrel Pixel Ladder Layer 1
cms.PSet(alignableId = cms.string("TPBLadderLayer1"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer1"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer1"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer1"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer1"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer1"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
### Barrel Pixel Ladder Layer 2
cms.PSet(alignableId = cms.string("TPBLadderLayer2"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer2"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer2"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer2"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer2"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer2"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
### Barrel Pixel Ladder Layer 3
cms.PSet(alignableId = cms.string("TPBLadderLayer3"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer3"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer3"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer3"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer3"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer3"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
### Barrel Pixel Ladder Layer 4
cms.PSet(alignableId = cms.string("TPBLadderLayer4"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer4"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer4"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer4"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer4"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPBLadderLayer4"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
### EndCap Pixel Panel Disk1
cms.PSet(alignableId = cms.string("TPEPanelDisk1"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk1"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk1"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk1"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk1"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk1"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
### EndCap Pixel Panel Disk2
cms.PSet(alignableId = cms.string("TPEPanelDisk2"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk2"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk2"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk2"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk2"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk2"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
### EndCap Pixel Panel Disk3
cms.PSet(alignableId = cms.string("TPEPanelDisk3"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk3"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk3"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk3"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk3"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDisk3"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
### EndCap Pixel Panel DiskM1
cms.PSet(alignableId = cms.string("TPEPanelDiskM1"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM1"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM1"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM1"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM1"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM1"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
### EndCap Pixel Panel DiskM2
cms.PSet(alignableId = cms.string("TPEPanelDiskM2"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM2"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM2"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM2"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM2"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM2"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
### EndCap Pixel Panel DiskM3
cms.PSet(alignableId = cms.string("TPEPanelDiskM3"),
DOF = cms.string("X"),
cut = cms.double(5.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM3"),
DOF = cms.string("thetaX"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM3"),
DOF = cms.string("Y"),
cut = cms.double(10.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM3"),
DOF = cms.string("thetaY"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM3"),
DOF = cms.string("Z"),
cut = cms.double(15.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
),
cms.PSet(alignableId = cms.string("TPEPanelDiskM3"),
DOF = cms.string("thetaZ"),
cut = cms.double(30.0),
sigCut = cms.double(2.5),
fractionCut = cms.double(0.25),
maxMoveCut = cms.double(100000.0),
maxErrorCut = cms.double(10000.0)
)
)
| 44.482866 | 126 | 0.40395 | 3,691 | 42,837 | 4.68816 | 0.019507 | 0.262136 | 0.112344 | 0.131068 | 0.992892 | 0.992892 | 0.992892 | 0.992892 | 0.992892 | 0.952785 | 0 | 0.089459 | 0.479142 | 42,837 | 962 | 127 | 44.529106 | 0.686088 | 0.012326 | 0 | 0.992788 | 0 | 0 | 0.051408 | 0.014769 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.002404 | 0 | 0.002404 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
b56689e8e31356c4519d2c38abafa69490224ed1 | 9,844 | py | Python | model_architectures.py | Utsav-Patel/The-Imitation-Game | 09dfaffdf917c1adfb1d8cd3e09a216b9a014e52 | [
"MIT"
] | null | null | null | model_architectures.py | Utsav-Patel/The-Imitation-Game | 09dfaffdf917c1adfb1d8cd3e09a216b9a014e52 | [
"MIT"
] | null | null | null | model_architectures.py | Utsav-Patel/The-Imitation-Game | 09dfaffdf917c1adfb1d8cd3e09a216b9a014e52 | [
"MIT"
] | null | null | null | import tensorflow as tf
from tensorflow.keras import layers, models
def create_model_project1_dense_10x10():
    """Build and compile the dense network for project 1 on 10x10 grids.

    Maps a flattened 100-element grid to a 5-way softmax distribution.

    Returns
    -------
    tf.keras.Model
        Compiled model (Adam optimizer, categorical cross-entropy,
        accuracy metric).
    """
    model = tf.keras.models.Sequential([
        tf.keras.layers.Dense(2048, activation='relu', input_shape=(100,)),
        tf.keras.layers.Dense(1024, activation='relu'),
        tf.keras.layers.Dense(512, activation='relu'),
        tf.keras.layers.Dense(256, activation='relu'),
        tf.keras.layers.Dense(128, activation='relu'),
        tf.keras.layers.Dense(64, activation='relu'),
        tf.keras.layers.Dense(32, activation='relu'),
        tf.keras.layers.Dense(16, activation='relu'),
        tf.keras.layers.Dense(5, activation='softmax'),
    ])
    # BUG FIX: the final layer already applies softmax, so the loss must be
    # computed from probabilities (from_logits=False). from_logits=True
    # would apply a second, implicit softmax inside the loss and distort
    # the gradients. This also matches the CNN builders in this file.
    loss_fn = tf.keras.losses.CategoricalCrossentropy(from_logits=False)
    adam = tf.keras.optimizers.Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999,
                                    epsilon=1e-07, amsgrad=False, name='Adam')
    model.compile(optimizer=adam,
                  loss=loss_fn,
                  metrics=['accuracy'])
    return model
def create_model_project3_dense_10x10():
    """Build and compile the dense network for project 3 on 10x10 grids.

    Input is a flat 200-element feature vector (presumably two stacked
    10x10 channels — confirm against the caller); output is a 5-way
    softmax distribution.

    Returns
    -------
    tf.keras.Model
        Compiled model (Adam optimizer, categorical cross-entropy,
        accuracy metric).
    """
    model = tf.keras.models.Sequential([
        tf.keras.layers.Dense(256, activation='relu', input_shape=(200,)),
        tf.keras.layers.Dense(128, activation='relu'),
        tf.keras.layers.Dense(64, activation='relu'),
        tf.keras.layers.Dense(32, activation='relu'),
        tf.keras.layers.Dense(16, activation='relu'),
        tf.keras.layers.Dense(5, activation='softmax'),
    ])
    # BUG FIX: the final layer already applies softmax, so the loss must be
    # computed from probabilities (from_logits=False). from_logits=True
    # would apply an extra softmax inside the loss and distort training.
    loss_fn = tf.keras.losses.CategoricalCrossentropy(from_logits=False)
    adam = tf.keras.optimizers.Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999,
                                    epsilon=1e-07, amsgrad=False, name='Adam')
    model.compile(optimizer=adam,
                  loss=loss_fn,
                  metrics=['accuracy'])
    return model
def create_model_project1_dense_20x20():
    """Build and compile the dense network for project 1 on 20x20 grids.

    Maps a flattened 400-element grid to a 4-way softmax distribution.

    Returns
    -------
    tf.keras.Model
        Compiled model (Adam optimizer, categorical cross-entropy,
        accuracy metric).
    """
    model = tf.keras.models.Sequential([
        tf.keras.layers.Dense(256, activation='relu', input_shape=(400,)),
        tf.keras.layers.Dense(128, activation='relu'),
        tf.keras.layers.Dense(64, activation='relu'),
        tf.keras.layers.Dense(32, activation='relu'),
        tf.keras.layers.Dense(16, activation='relu'),
        tf.keras.layers.Dense(4, activation='softmax'),
    ])
    # BUG FIX: the final layer already applies softmax, so the loss must be
    # computed from probabilities (from_logits=False). from_logits=True
    # would apply an extra softmax inside the loss and distort training.
    loss_fn = tf.keras.losses.CategoricalCrossentropy(from_logits=False)
    adam = tf.keras.optimizers.Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999,
                                    epsilon=1e-07, amsgrad=False, name='Adam')
    model.compile(optimizer=adam,
                  loss=loss_fn,
                  metrics=['accuracy'])
    return model
def create_model_project1_dense_50x50():
    """Build and compile the dense network for project 1 on 50x50 grids.

    Maps a flattened 2500-element grid to a 4-way softmax distribution.

    Returns
    -------
    tf.keras.Model
        Compiled model (Adam optimizer, categorical cross-entropy,
        accuracy metric).
    """
    model = tf.keras.models.Sequential([
        tf.keras.layers.Dense(256, activation='relu', input_shape=(2500,)),
        tf.keras.layers.Dense(128, activation='relu'),
        tf.keras.layers.Dense(64, activation='relu'),
        tf.keras.layers.Dense(32, activation='relu'),
        tf.keras.layers.Dense(16, activation='relu'),
        tf.keras.layers.Dense(4, activation='softmax'),
    ])
    # BUG FIX: the final layer already applies softmax, so the loss must be
    # computed from probabilities (from_logits=False). from_logits=True
    # would apply an extra softmax inside the loss and distort training.
    loss_fn = tf.keras.losses.CategoricalCrossentropy(from_logits=False)
    adam = tf.keras.optimizers.Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999,
                                    epsilon=1e-07, amsgrad=False, name='Adam')
    model.compile(optimizer=adam,
                  loss=loss_fn,
                  metrics=['accuracy'])
    return model
def create_model_project3_cnn_10x10():
    """Build and compile the CNN for project 3 on 10x10 grids.

    Takes a channels-first (3, 10, 10) input and outputs a 5-way softmax
    distribution.

    Returns
    -------
    tf.keras.Model
        Compiled model (Adam optimizer, categorical cross-entropy,
        accuracy metric).
    """
    # all convolutions share the same padding/activation/layout settings
    conv_kwargs = dict(padding='same', activation='relu',
                       data_format="channels_first")
    model = models.Sequential([
        layers.Conv2D(64, (3, 3), input_shape=(3, 10, 10), **conv_kwargs),
        layers.MaxPooling2D((2, 2), data_format="channels_first"),
        layers.Conv2D(32, (3, 3), **conv_kwargs),
        layers.MaxPooling2D((2, 2), data_format="channels_first"),
        layers.Conv2D(16, (3, 3), **conv_kwargs),
        layers.Flatten(),
        layers.Dense(64, activation='relu'),
        layers.Dense(5, activation='softmax'),
    ])
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001, beta_1=0.9,
                                           beta_2=0.999, epsilon=1e-07,
                                           amsgrad=False, name='Adam'),
        loss=tf.keras.losses.CategoricalCrossentropy(),
        metrics=['accuracy'],
    )
    return model
def create_model_project1_cnn_20x20():
    """Build and compile the CNN for project 1 on 20x20 grids.

    Takes a channels-first (3, 20, 20) input and outputs a 4-way softmax
    distribution.

    Returns
    -------
    tf.keras.Model
        Compiled model (Adam optimizer, categorical cross-entropy,
        accuracy metric).
    """
    # all convolutions share the same padding/activation/layout settings
    conv_kwargs = dict(padding='same', activation='relu',
                       data_format="channels_first")
    model = models.Sequential([
        layers.Conv2D(64, (3, 3), input_shape=(3, 20, 20), **conv_kwargs),
        layers.MaxPooling2D((2, 2), data_format="channels_first"),
        layers.Conv2D(32, (3, 3), **conv_kwargs),
        layers.MaxPooling2D((2, 2), data_format="channels_first"),
        layers.Conv2D(16, (3, 3), **conv_kwargs),
        layers.Flatten(),
        layers.Dense(64, activation='relu'),
        layers.Dense(4, activation='softmax'),
    ])
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001, beta_1=0.9,
                                           beta_2=0.999, epsilon=1e-07,
                                           amsgrad=False, name='Adam'),
        loss=tf.keras.losses.CategoricalCrossentropy(),
        metrics=['accuracy'],
    )
    return model
def create_model_project1_cnn_50x50():
    """Build and compile the CNN for project 1 on 50x50 grids.

    Takes a channels-first (3, 50, 50) input and outputs a 4-way softmax
    distribution.

    Returns
    -------
    tf.keras.Model
        Compiled model (Adam optimizer, categorical cross-entropy,
        accuracy metric).
    """
    # all convolutions share the same padding/activation/layout settings
    conv_kwargs = dict(padding='same', activation='relu',
                       data_format="channels_first")
    model = models.Sequential([
        layers.Conv2D(32, (3, 3), input_shape=(3, 50, 50), **conv_kwargs),
        layers.MaxPooling2D((2, 2), data_format="channels_first"),
        layers.Conv2D(64, (3, 3), **conv_kwargs),
        layers.MaxPooling2D((2, 2), data_format="channels_first"),
        layers.Conv2D(64, (3, 3), **conv_kwargs),
        layers.Flatten(),
        layers.Dense(128, activation='relu'),
        layers.Dense(4, activation='softmax'),
    ])
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001, beta_1=0.9,
                                           beta_2=0.999, epsilon=1e-07,
                                           amsgrad=False, name='Adam'),
        loss=tf.keras.losses.CategoricalCrossentropy(),
        metrics=['accuracy'],
    )
    return model
def create_model_project2_dense_20x20():
    """Build and compile the dense network for project 2 on 20x20 grids.

    Input is a flat 2000-element feature vector; output is a 4-way
    softmax distribution.

    Returns
    -------
    tf.keras.Model
        Compiled model (Adam optimizer, categorical cross-entropy,
        accuracy metric).
    """
    model = tf.keras.models.Sequential()
    model.add(tf.keras.layers.Dense(64, activation='relu', input_shape=(2000,)))
    # shrinking stack of hidden ReLU layers
    for width in (32, 16):
        model.add(tf.keras.layers.Dense(width, activation='relu'))
    model.add(tf.keras.layers.Dense(4, activation='softmax'))
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001, beta_1=0.9,
                                           beta_2=0.999, epsilon=1e-07,
                                           amsgrad=False, name='Adam'),
        loss=tf.keras.losses.CategoricalCrossentropy(),
        metrics=['accuracy'],
    )
    return model
def create_model_project2_cnn_20x20():
    """Build and compile the CNN for project 2 on 20x20 grids.

    Takes a channels-first (6, 20, 20) input and outputs a 4-way softmax
    distribution.

    Returns
    -------
    tf.keras.Model
        Compiled model (Adam optimizer, categorical cross-entropy,
        accuracy metric).
    """
    # all convolutions share the same padding/activation/layout settings
    conv_kwargs = dict(padding='same', activation='relu',
                       data_format="channels_first")
    model = models.Sequential([
        layers.Conv2D(16, (3, 3), input_shape=(6, 20, 20), **conv_kwargs),
        layers.MaxPooling2D((2, 2), data_format="channels_first"),
        layers.Conv2D(8, (3, 3), **conv_kwargs),
        layers.MaxPooling2D((2, 2), data_format="channels_first"),
        layers.Conv2D(4, (3, 3), **conv_kwargs),
        layers.Flatten(),
        layers.Dense(16, activation='relu'),
        layers.Dense(4, activation='softmax'),
    ])
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001, beta_1=0.9,
                                           beta_2=0.999, epsilon=1e-07,
                                           amsgrad=False, name='Adam'),
        loss=tf.keras.losses.CategoricalCrossentropy(),
        metrics=['accuracy'],
    )
    return model
| 42.431034 | 112 | 0.630841 | 1,251 | 9,844 | 4.851319 | 0.069544 | 0.092272 | 0.122096 | 0.142363 | 0.980227 | 0.975614 | 0.975614 | 0.972154 | 0.972154 | 0.966057 | 0 | 0.060933 | 0.209772 | 9,844 | 231 | 113 | 42.614719 | 0.719244 | 0.121191 | 0 | 0.806452 | 0 | 0 | 0.077351 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058065 | false | 0 | 0.012903 | 0 | 0.129032 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8d36cdddf7a73baea4cc385b5326ace216c8c9af | 4,149 | py | Python | src/hernquist.py | aneeshnaik/HernquistFlows | 7f81f9b47297b115ae6b593593aac59afafc48b3 | [
"MIT"
] | null | null | null | src/hernquist.py | aneeshnaik/HernquistFlows | 7f81f9b47297b115ae6b593593aac59afafc48b3 | [
"MIT"
] | null | null | null | src/hernquist.py | aneeshnaik/HernquistFlows | 7f81f9b47297b115ae6b593593aac59afafc48b3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Functions for isotropic and anisotropic Hernquist DF.
Created: May 2021
Author: A. P. Naik
"""
import numpy as np
from constants import pi, G
import torch
def calc_DF_iso(q, p, M, a):
    """
    Evaluate the isotropic distribution function of a Hernquist model.

    Parameters
    ----------
    q : np.array or torch.tensor, shape (N, 3) or (3)
        Positions at which to evaluate DF; shape (N, 3) for N phase points
        or (3) for a single phase point. UNITS: metres.
    p : np.array or torch.tensor, shape (N, 3) or (3)
        Velocities at which to evaluate DF. UNITS: m/s.
    M : float
        Mass of Hernquist profile. UNITS: kg.
    a : float
        Scale radius of Hernquist profile. UNITS: m.

    Returns
    -------
    f : np.array or torch.tensor (shape (N)) or float
        DF evaluated at the given phase points. 1D input gives a float;
        2D input gives an array/tensor matching the input type. Gradient
        information is propagated for torch inputs. UNITS: m^-6 s^3.
    """
    # inputs must describe the same set of phase points
    assert q.shape == p.shape

    # classify the input: single phase point? numpy rather than torch?
    single_point = (q.ndim == 1)
    from_numpy = (type(q) == np.ndarray)

    # do all the work on torch tensors
    if from_numpy:
        q = torch.tensor(q)
        p = torch.tensor(p)

    # radius and speed
    r = q.norm(dim=-1)
    v = p.norm(dim=-1)

    # specific energy, made dimensionless by the characteristic scale GM/a
    E = 0.5 * v**2 - G * M / (r + a)
    x = E / (G * M / a)
    B = torch.abs(x)

    # isotropic Hernquist DF in terms of the dimensionless binding energy B
    const = 1 / (np.sqrt(2) * (2 * pi)**3 * (G * M * a)**(3 / 2))
    prefac = torch.sqrt(B) / (1 - B)**2 * const
    term1 = (1 - 2 * B) * (8 * B**2 - 8 * B - 3)
    term2 = 3 * torch.arcsin(torch.sqrt(B)) / torch.sqrt(B * (1 - B))
    f = prefac * (term1 + term2)

    # unbound (x > 0) or unphysical (x < -1) phase points have zero DF
    if single_point:
        if (x.item() > 0) or (x.item() < -1):
            return 0.
        return f.item()
    f[(x > 0) | (x < -1)] = 0

    # hand back the caller's original array type
    if from_numpy:
        f = f.detach().numpy()
    return f
def calc_DF_aniso(q, p, M, a):
    """
    Evaluate the anisotropic distribution function of a Hernquist model.

    Parameters
    ----------
    q : np.array or torch.tensor, shape (N, 3) or (3)
        Positions at which to evaluate DF; shape (N, 3) for N phase points
        or (3) for a single phase point. UNITS: metres.
    p : np.array or torch.tensor, shape (N, 3) or (3)
        Velocities at which to evaluate DF. UNITS: m/s.
    M : float
        Mass of Hernquist profile. UNITS: kg.
    a : float
        Scale radius of Hernquist profile. UNITS: m.

    Returns
    -------
    f : np.array or torch.tensor (shape (N)) or float
        DF evaluated at the given phase points. 1D input gives a float;
        2D input gives an array/tensor matching the input type. Gradient
        information is propagated for torch inputs. UNITS: m^-6 s^3.
    """
    # inputs must describe the same set of phase points
    assert q.shape == p.shape

    # classify the input: single phase point? numpy rather than torch?
    single_point = (q.ndim == 1)
    from_numpy = (type(q) == np.ndarray)

    # do all the work on torch tensors
    if from_numpy:
        q = torch.tensor(q)
        p = torch.tensor(p)

    # radius, speed, dimensionless specific energy, angular momentum
    r = q.norm(dim=-1)
    v = p.norm(dim=-1)
    E = 0.5 * v**2 - G * M / (r + a)
    x = E / (G * M / a)
    L = torch.cross(q, p).norm(dim=-1)

    # anisotropic DF: f proportional to E^2 / L
    prefac = (3 * a) / (4 * pi**3)
    f = prefac * E**2 / (G**3 * M**3 * L)

    # unbound (x > 0) or unphysical (x < -1) phase points have zero DF
    if single_point:
        if (x.item() > 0) or (x.item() < -1):
            return 0.
        return f.item()
    f[(x > 0) | (x < -1)] = 0

    # hand back the caller's original array type
    if from_numpy:
        f = f.detach().numpy()
    return f
| 27.117647 | 78 | 0.550012 | 658 | 4,149 | 3.449848 | 0.214286 | 0.049339 | 0.031718 | 0.049339 | 0.809692 | 0.788546 | 0.788546 | 0.788546 | 0.788546 | 0.788546 | 0 | 0.028774 | 0.321523 | 4,149 | 152 | 79 | 27.296053 | 0.77762 | 0.525428 | 0 | 0.774194 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.032258 | 1 | 0.032258 | false | 0 | 0.048387 | 0 | 0.145161 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8da4b8de542a68d349fd25f9f33936337b960e87 | 186 | py | Python | auth/views/__init__.py | katrze89/app_authentication_authorization | f60057d44a5a4ff1f6641fba2b2b0948b9e1bd90 | [
"MIT"
] | null | null | null | auth/views/__init__.py | katrze89/app_authentication_authorization | f60057d44a5a4ff1f6641fba2b2b0948b9e1bd90 | [
"MIT"
] | null | null | null | auth/views/__init__.py | katrze89/app_authentication_authorization | f60057d44a5a4ff1f6641fba2b2b0948b9e1bd90 | [
"MIT"
] | 1 | 2021-04-08T20:33:57.000Z | 2021-04-08T20:33:57.000Z | from auth.views.auth_views import auth_bp, initiate
from auth.views.main_views import main_bp
from auth.views.signin_views import signin_bp
from auth.views.forgot_views import forgot_bp
| 37.2 | 51 | 0.860215 | 33 | 186 | 4.606061 | 0.272727 | 0.296053 | 0.342105 | 0.197368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.091398 | 186 | 4 | 52 | 46.5 | 0.899408 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
a5d5502f3bda581e18d96f78b0e6bc320d834866 | 533,089 | py | Python | tests/unit/gapic/compute_v1/test_instances.py | LaudateCorpus1/python-compute | a36c637f153c7b4ef49bb6a78c8b09f3746e7af1 | [
"Apache-2.0"
] | null | null | null | tests/unit/gapic/compute_v1/test_instances.py | LaudateCorpus1/python-compute | a36c637f153c7b4ef49bb6a78c8b09f3746e7af1 | [
"Apache-2.0"
] | null | null | null | tests/unit/gapic/compute_v1/test_instances.py | LaudateCorpus1/python-compute | a36c637f153c7b4ef49bb6a78c8b09f3746e7af1 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.instances import InstancesClient
from google.cloud.compute_v1.services.instances import pagers
from google.cloud.compute_v1.services.instances import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth
def client_cert_source_callback():
    """Dummy client-certificate source used by the mTLS tests.

    Returns a (cert, key) pair of placeholder byte strings.
    """
    cert, key = b"cert bytes", b"key bytes"
    return cert, key
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a non-localhost stand-in for *client*'s default endpoint.

    If the client's DEFAULT_ENDPOINT contains "localhost", substitute
    "foo.googleapis.com" so that mTLS endpoint derivation can still be
    exercised; otherwise return the client's own default unchanged.
    """
    default = client.DEFAULT_ENDPOINT
    if "localhost" in default:
        return "foo.googleapis.com"
    return default
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint should insert ".mtls" into googleapis
    hosts, leave mTLS and non-googleapis hosts untouched, and map None
    to None."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    # None passes straight through
    assert InstancesClient._get_default_mtls_endpoint(None) is None

    # (input, expected) pairs covering plain, already-mTLS, sandbox and
    # non-googleapis endpoints
    cases = [
        (api_endpoint, api_mtls_endpoint),
        (api_mtls_endpoint, api_mtls_endpoint),
        (sandbox_endpoint, sandbox_mtls_endpoint),
        (sandbox_mtls_endpoint, sandbox_mtls_endpoint),
        (non_googleapi, non_googleapi),
    ]
    for given, expected in cases:
        assert InstancesClient._get_default_mtls_endpoint(given) == expected
@pytest.mark.parametrize("client_class", [InstancesClient,])
def test_instances_client_from_service_account_info(client_class):
    # Building a client from an in-memory service-account info dict should
    # route through the credentials factory and wire the resulting
    # credentials into the transport, targeting the default host.
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as factory:
        # stub the factory so no real key material is needed
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == "compute.googleapis.com:443"
@pytest.mark.parametrize(
    "transport_class,transport_name", [(transports.InstancesRestTransport, "rest"),]
)
def test_instances_client_service_account_always_use_jwt(
    transport_class, transport_name
):
    # With always_use_jwt_access=True, the transport must opt the
    # service-account credentials into self-signed JWT access.
    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    # With always_use_jwt_access=False, the credentials must be left alone.
    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()
@pytest.mark.parametrize("client_class", [InstancesClient,])
def test_instances_client_from_service_account_file(client_class):
    # Both from_service_account_file and its from_service_account_json
    # alias should route through the credentials factory and wire the
    # resulting credentials into the transport.
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as factory:
        # stub the factory so no key file needs to exist on disk
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        client = client_class.from_service_account_json("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == "compute.googleapis.com:443"
def test_instances_client_get_transport_class():
    """The client should default to the REST transport, and resolving the
    name "rest" should yield that same transport class."""
    default_transport = InstancesClient.get_transport_class()
    available_transports = [
        transports.InstancesRestTransport,
    ]
    assert default_transport in available_transports

    named_transport = InstancesClient.get_transport_class("rest")
    assert named_transport == transports.InstancesRestTransport
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [(InstancesClient, transports.InstancesRestTransport, "rest"),],
)
@mock.patch.object(
    InstancesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstancesClient)
)
def test_instances_client_client_options(client_class, transport_class, transport_name):
    """Exercise client construction under the various combinations of
    client_options and GOOGLE_API_USE_MTLS_ENDPOINT /
    GOOGLE_API_USE_CLIENT_CERTIFICATE environment settings, checking the
    host and credential arguments handed to the transport each time."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(InstancesClient, "get_transport_class") as gtc:
        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(InstancesClient, "get_transport_class") as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class(transport=transport_name)

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class(transport=transport_name)

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (InstancesClient, transports.InstancesRestTransport, "rest", "true"),
        (InstancesClient, transports.InstancesRestTransport, "rest", "false"),
    ],
)
@mock.patch.object(
    InstancesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstancesClient)
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_instances_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """Verify endpoint/cert autoswitching when GOOGLE_API_USE_MTLS_ENDPOINT="auto".

    Three scenarios are exercised, parameterized over
    GOOGLE_API_USE_CLIENT_CERTIFICATE ("true" / "false"):
      1. an explicit client_cert_source supplied via client options,
      2. a default (ADC) client cert discovered via google.auth.transport.mtls,
      3. no client cert available at all.
    The mTLS endpoint and cert source must only be selected when the env var
    is "true" AND a cert exists; otherwise the default endpoint with no cert.
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)
            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    # NOTE(review): `client` here is still the instance created in
                    # the previous scenario; DEFAULT_ENDPOINT/DEFAULT_MTLS_ENDPOINT
                    # are class-level attributes, so the expected values are the same.
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback
                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )
    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )
@pytest.mark.parametrize("client_class", [InstancesClient])
@mock.patch.object(
    InstancesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstancesClient)
)
def test_instances_client_get_mtls_endpoint_and_cert_source(client_class):
    """Exercise get_mtls_endpoint_and_cert_source() across the env-var matrix.

    Covers GOOGLE_API_USE_CLIENT_CERTIFICATE true/false with explicit options,
    GOOGLE_API_USE_MTLS_ENDPOINT never/always, and the implicit "auto" mode
    both with and without a discoverable default (ADC) client certificate.
    """
    mock_client_cert_source = mock.Mock()
    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source
    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        # Rebound here; the final "auto + default cert exists" case below
        # compares against this Mock instance.
        mock_client_cert_source = mock.Mock()
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(
            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
        )
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
            options
        )
        assert api_endpoint == mock_api_endpoint
        assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_ENDPOINT
        assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
        assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=False,
        ):
            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
            assert api_endpoint == client_class.DEFAULT_ENDPOINT
            assert cert_source is None
    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        with mock.patch(
            "google.auth.transport.mtls.has_default_client_cert_source",
            return_value=True,
        ):
            with mock.patch(
                "google.auth.transport.mtls.default_client_cert_source",
                return_value=mock_client_cert_source,
            ):
                (
                    api_endpoint,
                    cert_source,
                ) = client_class.get_mtls_endpoint_and_cert_source()
                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
                assert cert_source == mock_client_cert_source
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [(InstancesClient, transports.InstancesRestTransport, "rest",)],
)
def test_instances_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """Explicit scopes in client options must be forwarded to the transport."""
    requested_scopes = ["1", "2"]
    opts = client_options.ClientOptions(scopes=requested_scopes)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=opts, transport=transport_name)
        # Everything else stays at its default; only `scopes` is customized.
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=requested_scopes,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [(InstancesClient, transports.InstancesRestTransport, "rest",)],
)
def test_instances_client_client_options_credentials_file(
    client_class, transport_class, transport_name
):
    """A credentials_file in client options must reach the transport ctor."""
    creds_file = "credentials.json"
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(
            client_options=client_options.ClientOptions(credentials_file=creds_file),
            transport=transport_name,
        )
        # Only `credentials_file` is customized; all other args stay default.
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=creds_file,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "request_type", [compute.AddAccessConfigInstanceRequest, dict,]
)
def test_add_access_config_unary_rest(request_type):
    """add_access_config_unary: a mocked 200 response is decoded into an Operation.

    Builds a transcoding-satisfying request (path params + body resource),
    patches the REST session to return a fully populated compute.Operation
    JSON payload, and checks every field round-trips onto the response.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["access_config_resource"] = {
        "external_ipv6": "external_ipv6_value",
        "external_ipv6_prefix_length": 2837,
        "kind": "kind_value",
        "name": "name_value",
        "nat_i_p": "nat_i_p_value",
        "network_tier": "network_tier_value",
        "public_ptr_domain_name": "public_ptr_domain_name_value",
        "set_public_ptr": True,
        "type_": "type__value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.add_access_config_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_add_access_config_unary_rest_required_fields(
    request_type=compute.AddAccessConfigInstanceRequest,
):
    """Required-field handling for add_access_config.

    Walks the request through JSON-ification, checks that fields left at
    their default are dropped and then re-added by
    _get_unset_required_fields, and finally issues a transcode-mocked call
    asserting the expected query params reach the HTTP session.
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["network_interface"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    assert "networkInterface" not in jsonified_request
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).add_access_config._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    assert "networkInterface" in jsonified_request
    assert jsonified_request["networkInterface"] == request_init["network_interface"]
    jsonified_request["instance"] = "instance_value"
    jsonified_request["networkInterface"] = "network_interface_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).add_access_config._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", "network_interface",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "networkInterface" in jsonified_request
    assert jsonified_request["networkInterface"] == "network_interface_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.add_access_config_unary(request)
            # Only the required-but-empty networkInterface shows up as a query param.
            expected_params = [
                ("networkInterface", "",),
            ]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_add_access_config_unary_rest_unset_required_fields():
    """add_access_config must report exactly its settable-but-unset required params.

    The unset fields are the intersection of the method's runtime parameters
    (requestId, networkInterface) with its required fields.
    """
    transport = transports.InstancesRestTransport(
        # Bug fix: pass a credentials *instance*, not the class object.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.add_access_config._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId", "networkInterface",))
        & set(
            ("accessConfigResource", "instance", "networkInterface", "project", "zone",)
        )
    )
def test_add_access_config_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.AddAccessConfigInstanceRequest
):
    """An HTTP 400 from the session must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request that satisfies transcoding: path params plus the body resource.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "access_config_resource": {
                "external_ipv6": "external_ipv6_value",
                "external_ipv6_prefix_length": 2837,
                "kind": "kind_value",
                "name": "name_value",
                "nat_i_p": "nat_i_p_value",
                "network_tier": "network_tier_value",
                "public_ptr_domain_name": "public_ptr_domain_name_value",
                "set_public_ptr": True,
                "type_": "type__value",
            },
        }
    )
    # Hand the client a 400 response and expect BadRequest to propagate.
    with mock.patch.object(Session, "request") as req:
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.add_access_config_unary(request)
def test_add_access_config_unary_rest_flattened():
    """Flattened args for add_access_config must hit the addAccessConfig URL.

    Patches the REST session with a canned 200 Operation response and
    validates the single outgoing request's URL against the path template.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            network_interface="network_interface_value",
            access_config_resource=compute.AccessConfig(
                external_ipv6="external_ipv6_value"
            ),
        )
        # NOTE: update() lets the http-rule path params override the
        # placeholder values above for project/zone/instance.
        mock_args.update(sample_request)
        client.add_access_config_unary(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addAccessConfig"
            % client.transport._host,
            args[1],
        )
def test_add_access_config_unary_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened args raises ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        network_interface="network_interface_value",
        access_config_resource=compute.AccessConfig(
            external_ipv6="external_ipv6_value"
        ),
    )
    # A fully-formed request and flattened fields are mutually exclusive.
    with pytest.raises(ValueError):
        client.add_access_config_unary(
            compute.AddAccessConfigInstanceRequest(), **flattened
        )
def test_add_access_config_unary_rest_error():
    """Smoke test: constructing the REST client must not raise."""
    creds = ga_credentials.AnonymousCredentials()
    client = InstancesClient(credentials=creds, transport="rest")
@pytest.mark.parametrize(
    "request_type", [compute.AddResourcePoliciesInstanceRequest, dict,]
)
def test_add_resource_policies_unary_rest(request_type):
    """add_resource_policies_unary: a mocked 200 response decodes to an Operation.

    Builds a transcoding-satisfying request (path params + body resource),
    patches the REST session to return a fully populated compute.Operation
    JSON payload, and checks every field round-trips onto the response.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["instances_add_resource_policies_request_resource"] = {
        "resource_policies": ["resource_policies_value_1", "resource_policies_value_2"]
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.add_resource_policies_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_add_resource_policies_unary_rest_required_fields(
    request_type=compute.AddResourcePoliciesInstanceRequest,
):
    """Required-field handling for add_resource_policies.

    Verifies _get_unset_required_fields leaves explicitly-set path params
    alone, then issues a transcode-mocked call asserting no extra query
    params reach the HTTP session (this method has none required-but-empty).
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).add_resource_policies._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).add_resource_policies._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.add_resource_policies_unary(request)
            # No required query params expected for this method.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_add_resource_policies_unary_rest_unset_required_fields():
    """add_resource_policies must report exactly its settable-but-unset params.

    The unset fields are the intersection of the method's runtime parameter
    (requestId) with its required fields.
    """
    transport = transports.InstancesRestTransport(
        # Bug fix: pass a credentials *instance*, not the class object.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.add_resource_policies._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "instance",
                "instancesAddResourcePoliciesRequestResource",
                "project",
                "zone",
            )
        )
    )
def test_add_resource_policies_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.AddResourcePoliciesInstanceRequest
):
    """An HTTP 400 from the session must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Request that satisfies transcoding: path params plus the body resource.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "instances_add_resource_policies_request_resource": {
                "resource_policies": [
                    "resource_policies_value_1",
                    "resource_policies_value_2",
                ]
            },
        }
    )
    # Hand the client a 400 response and expect BadRequest to propagate.
    with mock.patch.object(Session, "request") as req:
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.add_resource_policies_unary(request)
def test_add_resource_policies_unary_rest_flattened():
    """Flattened args for add_resource_policies must hit the addResourcePolicies URL.

    Patches the REST session with a canned 200 Operation response and
    validates the single outgoing request's URL against the path template.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_add_resource_policies_request_resource=compute.InstancesAddResourcePoliciesRequest(
                resource_policies=["resource_policies_value"]
            ),
        )
        # NOTE: update() lets the http-rule path params override the
        # placeholder values above for project/zone/instance.
        mock_args.update(sample_request)
        client.add_resource_policies_unary(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addResourcePolicies"
            % client.transport._host,
            args[1],
        )
def test_add_resource_policies_unary_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened args raises ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        instances_add_resource_policies_request_resource=compute.InstancesAddResourcePoliciesRequest(
            resource_policies=["resource_policies_value"]
        ),
    )
    # A fully-formed request and flattened fields are mutually exclusive.
    with pytest.raises(ValueError):
        client.add_resource_policies_unary(
            compute.AddResourcePoliciesInstanceRequest(), **flattened
        )
def test_add_resource_policies_unary_rest_error():
    """Smoke test: constructing the REST client must not raise."""
    creds = ga_credentials.AnonymousCredentials()
    client = InstancesClient(credentials=creds, transport="rest")
@pytest.mark.parametrize(
    "request_type", [compute.AggregatedListInstancesRequest, dict,]
)
def test_aggregated_list_rest(request_type):
    """aggregated_list: a mocked 200 payload is wrapped in an AggregatedListPager.

    Fakes an InstanceAggregatedList JSON response and verifies the pager
    surfaces its top-level fields unchanged.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.InstanceAggregatedList(
            id="id_value",
            kind="kind_value",
            next_page_token="next_page_token_value",
            self_link="self_link_value",
            unreachables=["unreachables_value"],
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.InstanceAggregatedList.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.aggregated_list(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.AggregatedListPager)
    assert response.id == "id_value"
    assert response.kind == "kind_value"
    assert response.next_page_token == "next_page_token_value"
    assert response.self_link == "self_link_value"
    assert response.unreachables == ["unreachables_value"]
def test_aggregated_list_rest_required_fields(
    request_type=compute.AggregatedListInstancesRequest,
):
    """Required-field handling for aggregated_list.

    Verifies _get_unset_required_fields yields only the optional paging/filter
    params (never the required `project` path param), then issues a
    transcode-mocked call asserting no extra query params are sent.
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["project"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).aggregated_list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).aggregated_list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(
        (
            "max_results",
            "include_all_scopes",
            "filter",
            "order_by",
            "page_token",
            "return_partial_success",
        )
    )
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.InstanceAggregatedList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.InstanceAggregatedList.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.aggregated_list(request)
            # No required query params expected for this method.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_aggregated_list_rest_unset_required_fields():
    """aggregated_list must report exactly its settable-but-unset params.

    The unset fields are the intersection of the optional paging/filter
    parameters with the method's required fields (only `project`).
    """
    transport = transports.InstancesRestTransport(
        # Bug fix: pass a credentials *instance*, not the class object.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.aggregated_list._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(
            (
                "maxResults",
                "includeAllScopes",
                "filter",
                "orderBy",
                "pageToken",
                "returnPartialSuccess",
            )
        )
        & set(("project",))
    )
def test_aggregated_list_rest_bad_request(
    transport: str = "rest", request_type=compute.AggregatedListInstancesRequest
):
    """An HTTP 400 from the session must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Minimal request that satisfies transcoding (just the project path param).
    request = request_type({"project": "sample1"})
    # Hand the client a 400 response and expect BadRequest to propagate.
    with mock.patch.object(Session, "request") as req:
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.aggregated_list(request)
def test_aggregated_list_rest_flattened():
    """Exercise the flattened-argument calling form of aggregated_list."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Fake a successful HTTP response carrying an empty aggregated list.
        return_value = compute.InstanceAggregatedList()
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.InstanceAggregatedList.to_json(
            return_value
        ).encode("UTF-8")
        req.return_value = response_value
        # Path params satisfying the http rule, merged over the flattened args.
        sample_request = {"project": "sample1"}
        mock_args = dict(project="project_value",)
        mock_args.update(sample_request)
        client.aggregated_list(**mock_args)
        # Exactly one underlying HTTP call, hitting the expected URI.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/aggregated/instances"
            % client.transport._host,
            args[1],
        )
def test_aggregated_list_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must fail."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mixing a request object with flattened keyword fields is a ValueError.
    with pytest.raises(ValueError):
        client.aggregated_list(
            compute.AggregatedListInstancesRequest(), project="project_value",
        )
def test_aggregated_list_rest_pager(transport: str = "rest"):
    """Verify aggregated_list pagination: per-page item access, full iteration
    over (key, value) tuples, and the `.pages` token sequence."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        # with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.InstanceAggregatedList(
                items={
                    "a": compute.InstancesScopedList(),
                    "b": compute.InstancesScopedList(),
                    "c": compute.InstancesScopedList(),
                },
                next_page_token="abc",
            ),
            compute.InstanceAggregatedList(items={}, next_page_token="def",),
            compute.InstanceAggregatedList(
                items={"g": compute.InstancesScopedList(),}, next_page_token="ghi",
            ),
            compute.InstanceAggregatedList(
                items={
                    "h": compute.InstancesScopedList(),
                    "i": compute.InstancesScopedList(),
                },
            ),
        )
        # Two responses for two calls
        response = response + response
        # Wrap the values into proper Response objs
        response = tuple(compute.InstanceAggregatedList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode("UTF-8")
            return_val.status_code = 200
        req.side_effect = return_values
        sample_request = {"project": "sample1"}
        pager = client.aggregated_list(request=sample_request)
        # get() resolves against the *current* page only: "a" is on page 1,
        # "h" is not reachable until the pager advances.
        assert isinstance(pager.get("a"), compute.InstancesScopedList)
        assert pager.get("h") is None
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, tuple) for i in results)
        for result in results:
            assert isinstance(result, tuple)
            assert tuple(type(t) for t in result) == (str, compute.InstancesScopedList)
        # After exhausting the iterator the pager sits on the last page,
        # so the earlier/later lookups invert.
        assert pager.get("a") is None
        assert isinstance(pager.get("h"), compute.InstancesScopedList)
        pages = list(client.aggregated_list(request=sample_request).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize("request_type", [compute.AttachDiskInstanceRequest, dict,])
def test_attach_disk_unary_rest(request_type):
    """Happy path for attach_disk_unary: a fully-populated request dict is
    sent through a mocked HTTP session and the JSON reply deserializes into
    a compute.Operation whose every field round-trips intact."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    # Fully-populated AttachedDisk body; values are placeholders that only
    # need to survive serialization, not be semantically valid.
    request_init["attached_disk_resource"] = {
        "auto_delete": True,
        "boot": True,
        "device_name": "device_name_value",
        "disk_encryption_key": {
            "kms_key_name": "kms_key_name_value",
            "kms_key_service_account": "kms_key_service_account_value",
            "raw_key": "raw_key_value",
            "rsa_encrypted_key": "rsa_encrypted_key_value",
            "sha256": "sha256_value",
        },
        "disk_size_gb": 1261,
        "guest_os_features": [{"type_": "type__value"}],
        "index": 536,
        "initialize_params": {
            "description": "description_value",
            "disk_name": "disk_name_value",
            "disk_size_gb": 1261,
            "disk_type": "disk_type_value",
            "labels": {},
            "on_update_action": "on_update_action_value",
            "provisioned_iops": 1740,
            "resource_policies": [
                "resource_policies_value_1",
                "resource_policies_value_2",
            ],
            "source_image": "source_image_value",
            "source_image_encryption_key": {
                "kms_key_name": "kms_key_name_value",
                "kms_key_service_account": "kms_key_service_account_value",
                "raw_key": "raw_key_value",
                "rsa_encrypted_key": "rsa_encrypted_key_value",
                "sha256": "sha256_value",
            },
            "source_snapshot": "source_snapshot_value",
            "source_snapshot_encryption_key": {
                "kms_key_name": "kms_key_name_value",
                "kms_key_service_account": "kms_key_service_account_value",
                "raw_key": "raw_key_value",
                "rsa_encrypted_key": "rsa_encrypted_key_value",
                "sha256": "sha256_value",
            },
        },
        "interface": "interface_value",
        "kind": "kind_value",
        "licenses": ["licenses_value_1", "licenses_value_2"],
        "mode": "mode_value",
        "shielded_instance_initial_state": {
            "dbs": [{"content": "content_value", "file_type": "file_type_value"}],
            "dbxs": [{"content": "content_value", "file_type": "file_type_value"}],
            "keks": [{"content": "content_value", "file_type": "file_type_value"}],
            "pk": {"content": "content_value", "file_type": "file_type_value"},
        },
        "source": "source_value",
        "type_": "type__value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.attach_disk_unary(request)
    # Establish that the response is the type that we expect, with every
    # Operation field deserialized to the value set above.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_attach_disk_unary_rest_required_fields(
    request_type=compute.AttachDiskInstanceRequest,
):
    """Check required-field handling for attach_disk_unary: default values
    are dropped, re-supplied non-default values are preserved, and no
    required field leaks into the request query params."""
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).attach_disk._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).attach_disk._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", "force_attach",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.attach_disk_unary(request)
            # No required field may end up in the HTTP query params.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_attach_disk_unary_rest_unset_required_fields():
    """Check the unset-required-fields computation for attach_disk.

    Expected: intersection of the method's optional query params
    (requestId/forceAttach) and its required fields — empty here.
    """
    # Fix: AnonymousCredentials must be *instantiated* — the transport expects
    # a credentials object, not the credentials class itself.
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.attach_disk._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId", "forceAttach",))
        & set(("attachedDiskResource", "instance", "project", "zone",))
    )
def test_attach_disk_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.AttachDiskInstanceRequest
):
    """An HTTP 400 during attach_disk_unary must surface as BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    # Fully-populated AttachedDisk body; contents are irrelevant to the
    # error path but must serialize cleanly.
    request_init["attached_disk_resource"] = {
        "auto_delete": True,
        "boot": True,
        "device_name": "device_name_value",
        "disk_encryption_key": {
            "kms_key_name": "kms_key_name_value",
            "kms_key_service_account": "kms_key_service_account_value",
            "raw_key": "raw_key_value",
            "rsa_encrypted_key": "rsa_encrypted_key_value",
            "sha256": "sha256_value",
        },
        "disk_size_gb": 1261,
        "guest_os_features": [{"type_": "type__value"}],
        "index": 536,
        "initialize_params": {
            "description": "description_value",
            "disk_name": "disk_name_value",
            "disk_size_gb": 1261,
            "disk_type": "disk_type_value",
            "labels": {},
            "on_update_action": "on_update_action_value",
            "provisioned_iops": 1740,
            "resource_policies": [
                "resource_policies_value_1",
                "resource_policies_value_2",
            ],
            "source_image": "source_image_value",
            "source_image_encryption_key": {
                "kms_key_name": "kms_key_name_value",
                "kms_key_service_account": "kms_key_service_account_value",
                "raw_key": "raw_key_value",
                "rsa_encrypted_key": "rsa_encrypted_key_value",
                "sha256": "sha256_value",
            },
            "source_snapshot": "source_snapshot_value",
            "source_snapshot_encryption_key": {
                "kms_key_name": "kms_key_name_value",
                "kms_key_service_account": "kms_key_service_account_value",
                "raw_key": "raw_key_value",
                "rsa_encrypted_key": "rsa_encrypted_key_value",
                "sha256": "sha256_value",
            },
        },
        "interface": "interface_value",
        "kind": "kind_value",
        "licenses": ["licenses_value_1", "licenses_value_2"],
        "mode": "mode_value",
        "shielded_instance_initial_state": {
            "dbs": [{"content": "content_value", "file_type": "file_type_value"}],
            "dbxs": [{"content": "content_value", "file_type": "file_type_value"}],
            "keks": [{"content": "content_value", "file_type": "file_type_value"}],
            "pk": {"content": "content_value", "file_type": "file_type_value"},
        },
        "source": "source_value",
        "type_": "type__value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.attach_disk_unary(request)
def test_attach_disk_unary_rest_flattened():
    """Exercise the flattened-argument calling form of attach_disk_unary."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Fake a successful HTTP response wrapping an empty Operation.
        return_value = compute.Operation()
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(return_value).encode(
            "UTF-8"
        )
        req.return_value = response_value
        # Path params satisfying the http rule, merged over the flattened args.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            attached_disk_resource=compute.AttachedDisk(auto_delete=True),
        )
        mock_args.update(sample_request)
        client.attach_disk_unary(**mock_args)
        # Exactly one underlying HTTP call, targeting the attachDisk URI.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/attachDisk"
            % client.transport._host,
            args[1],
        )
def test_attach_disk_unary_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must fail."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mixing a request object with flattened keyword fields is a ValueError.
    with pytest.raises(ValueError):
        client.attach_disk_unary(
            compute.AttachDiskInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            attached_disk_resource=compute.AttachedDisk(auto_delete=True),
        )
def test_attach_disk_unary_rest_error():
    """Placeholder error-path test: just construct a REST-transport client."""
    transport_name = "rest"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name
    )
@pytest.mark.parametrize("request_type", [compute.BulkInsertInstanceRequest, dict,])
def test_bulk_insert_unary_rest(request_type):
    """Happy path for bulk_insert_unary: a fully-populated request dict is
    sent through a mocked HTTP session and the JSON reply deserializes into
    a compute.Operation whose every field round-trips intact."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2"}
    # Fully-populated BulkInsertInstanceResource body; values are
    # placeholders that only need to survive serialization.
    request_init["bulk_insert_instance_resource_resource"] = {
        "count": 553,
        "instance_properties": {
            "advanced_machine_features": {
                "enable_nested_virtualization": True,
                "threads_per_core": 1689,
            },
            "can_ip_forward": True,
            "confidential_instance_config": {"enable_confidential_compute": True},
            "description": "description_value",
            "disks": [
                {
                    "auto_delete": True,
                    "boot": True,
                    "device_name": "device_name_value",
                    "disk_encryption_key": {
                        "kms_key_name": "kms_key_name_value",
                        "kms_key_service_account": "kms_key_service_account_value",
                        "raw_key": "raw_key_value",
                        "rsa_encrypted_key": "rsa_encrypted_key_value",
                        "sha256": "sha256_value",
                    },
                    "disk_size_gb": 1261,
                    "guest_os_features": [{"type_": "type__value"}],
                    "index": 536,
                    "initialize_params": {
                        "description": "description_value",
                        "disk_name": "disk_name_value",
                        "disk_size_gb": 1261,
                        "disk_type": "disk_type_value",
                        "labels": {},
                        "on_update_action": "on_update_action_value",
                        "provisioned_iops": 1740,
                        "resource_policies": [
                            "resource_policies_value_1",
                            "resource_policies_value_2",
                        ],
                        "source_image": "source_image_value",
                        "source_image_encryption_key": {
                            "kms_key_name": "kms_key_name_value",
                            "kms_key_service_account": "kms_key_service_account_value",
                            "raw_key": "raw_key_value",
                            "rsa_encrypted_key": "rsa_encrypted_key_value",
                            "sha256": "sha256_value",
                        },
                        "source_snapshot": "source_snapshot_value",
                        "source_snapshot_encryption_key": {
                            "kms_key_name": "kms_key_name_value",
                            "kms_key_service_account": "kms_key_service_account_value",
                            "raw_key": "raw_key_value",
                            "rsa_encrypted_key": "rsa_encrypted_key_value",
                            "sha256": "sha256_value",
                        },
                    },
                    "interface": "interface_value",
                    "kind": "kind_value",
                    "licenses": ["licenses_value_1", "licenses_value_2"],
                    "mode": "mode_value",
                    "shielded_instance_initial_state": {
                        "dbs": [
                            {"content": "content_value", "file_type": "file_type_value"}
                        ],
                        "dbxs": [
                            {"content": "content_value", "file_type": "file_type_value"}
                        ],
                        "keks": [
                            {"content": "content_value", "file_type": "file_type_value"}
                        ],
                        "pk": {
                            "content": "content_value",
                            "file_type": "file_type_value",
                        },
                    },
                    "source": "source_value",
                    "type_": "type__value",
                }
            ],
            "guest_accelerators": [
                {
                    "accelerator_count": 1805,
                    "accelerator_type": "accelerator_type_value",
                }
            ],
            "labels": {},
            "machine_type": "machine_type_value",
            "metadata": {
                "fingerprint": "fingerprint_value",
                "items": [{"key": "key_value", "value": "value_value"}],
                "kind": "kind_value",
            },
            "min_cpu_platform": "min_cpu_platform_value",
            "network_interfaces": [
                {
                    "access_configs": [
                        {
                            "external_ipv6": "external_ipv6_value",
                            "external_ipv6_prefix_length": 2837,
                            "kind": "kind_value",
                            "name": "name_value",
                            "nat_i_p": "nat_i_p_value",
                            "network_tier": "network_tier_value",
                            "public_ptr_domain_name": "public_ptr_domain_name_value",
                            "set_public_ptr": True,
                            "type_": "type__value",
                        }
                    ],
                    "alias_ip_ranges": [
                        {
                            "ip_cidr_range": "ip_cidr_range_value",
                            "subnetwork_range_name": "subnetwork_range_name_value",
                        }
                    ],
                    "fingerprint": "fingerprint_value",
                    "ipv6_access_configs": [
                        {
                            "external_ipv6": "external_ipv6_value",
                            "external_ipv6_prefix_length": 2837,
                            "kind": "kind_value",
                            "name": "name_value",
                            "nat_i_p": "nat_i_p_value",
                            "network_tier": "network_tier_value",
                            "public_ptr_domain_name": "public_ptr_domain_name_value",
                            "set_public_ptr": True,
                            "type_": "type__value",
                        }
                    ],
                    "ipv6_access_type": "ipv6_access_type_value",
                    "ipv6_address": "ipv6_address_value",
                    "kind": "kind_value",
                    "name": "name_value",
                    "network": "network_value",
                    "network_i_p": "network_i_p_value",
                    "nic_type": "nic_type_value",
                    "queue_count": 1197,
                    "stack_type": "stack_type_value",
                    "subnetwork": "subnetwork_value",
                }
            ],
            "private_ipv6_google_access": "private_ipv6_google_access_value",
            "reservation_affinity": {
                "consume_reservation_type": "consume_reservation_type_value",
                "key": "key_value",
                "values": ["values_value_1", "values_value_2"],
            },
            "resource_policies": [
                "resource_policies_value_1",
                "resource_policies_value_2",
            ],
            "scheduling": {
                "automatic_restart": True,
                "location_hint": "location_hint_value",
                "min_node_cpus": 1379,
                "node_affinities": [
                    {
                        "key": "key_value",
                        "operator": "operator_value",
                        "values": ["values_value_1", "values_value_2"],
                    }
                ],
                "on_host_maintenance": "on_host_maintenance_value",
                "preemptible": True,
            },
            "service_accounts": [
                {"email": "email_value", "scopes": ["scopes_value_1", "scopes_value_2"]}
            ],
            "shielded_instance_config": {
                "enable_integrity_monitoring": True,
                "enable_secure_boot": True,
                "enable_vtpm": True,
            },
            "tags": {
                "fingerprint": "fingerprint_value",
                "items": ["items_value_1", "items_value_2"],
            },
        },
        "location_policy": {"locations": {}},
        "min_count": 972,
        "name_pattern": "name_pattern_value",
        "per_instance_properties": {},
        "source_instance_template": "source_instance_template_value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.bulk_insert_unary(request)
    # Establish that the response is the type that we expect, with every
    # Operation field deserialized to the value set above.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_bulk_insert_unary_rest_required_fields(
    request_type=compute.BulkInsertInstanceRequest,
):
    """Check required-field handling for bulk_insert_unary: default values
    are dropped, re-supplied non-default values are preserved, and no
    required field leaks into the request query params."""
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).bulk_insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).bulk_insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.bulk_insert_unary(request)
            # No required field may end up in the HTTP query params.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_bulk_insert_unary_rest_unset_required_fields():
    """Check the unset-required-fields computation for bulk_insert.

    Expected: intersection of the method's optional query params
    (requestId) and its required fields — empty here.
    """
    # Fix: AnonymousCredentials must be *instantiated* — the transport expects
    # a credentials object, not the credentials class itself.
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.bulk_insert._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",))
        & set(("bulkInsertInstanceResourceResource", "project", "zone",))
    )
def test_bulk_insert_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.BulkInsertInstanceRequest
):
    """An HTTP 400 during bulk_insert_unary must surface as BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2"}
    # Fully-populated BulkInsertInstanceResource body; contents are
    # irrelevant to the error path but must serialize cleanly.
    request_init["bulk_insert_instance_resource_resource"] = {
        "count": 553,
        "instance_properties": {
            "advanced_machine_features": {
                "enable_nested_virtualization": True,
                "threads_per_core": 1689,
            },
            "can_ip_forward": True,
            "confidential_instance_config": {"enable_confidential_compute": True},
            "description": "description_value",
            "disks": [
                {
                    "auto_delete": True,
                    "boot": True,
                    "device_name": "device_name_value",
                    "disk_encryption_key": {
                        "kms_key_name": "kms_key_name_value",
                        "kms_key_service_account": "kms_key_service_account_value",
                        "raw_key": "raw_key_value",
                        "rsa_encrypted_key": "rsa_encrypted_key_value",
                        "sha256": "sha256_value",
                    },
                    "disk_size_gb": 1261,
                    "guest_os_features": [{"type_": "type__value"}],
                    "index": 536,
                    "initialize_params": {
                        "description": "description_value",
                        "disk_name": "disk_name_value",
                        "disk_size_gb": 1261,
                        "disk_type": "disk_type_value",
                        "labels": {},
                        "on_update_action": "on_update_action_value",
                        "provisioned_iops": 1740,
                        "resource_policies": [
                            "resource_policies_value_1",
                            "resource_policies_value_2",
                        ],
                        "source_image": "source_image_value",
                        "source_image_encryption_key": {
                            "kms_key_name": "kms_key_name_value",
                            "kms_key_service_account": "kms_key_service_account_value",
                            "raw_key": "raw_key_value",
                            "rsa_encrypted_key": "rsa_encrypted_key_value",
                            "sha256": "sha256_value",
                        },
                        "source_snapshot": "source_snapshot_value",
                        "source_snapshot_encryption_key": {
                            "kms_key_name": "kms_key_name_value",
                            "kms_key_service_account": "kms_key_service_account_value",
                            "raw_key": "raw_key_value",
                            "rsa_encrypted_key": "rsa_encrypted_key_value",
                            "sha256": "sha256_value",
                        },
                    },
                    "interface": "interface_value",
                    "kind": "kind_value",
                    "licenses": ["licenses_value_1", "licenses_value_2"],
                    "mode": "mode_value",
                    "shielded_instance_initial_state": {
                        "dbs": [
                            {"content": "content_value", "file_type": "file_type_value"}
                        ],
                        "dbxs": [
                            {"content": "content_value", "file_type": "file_type_value"}
                        ],
                        "keks": [
                            {"content": "content_value", "file_type": "file_type_value"}
                        ],
                        "pk": {
                            "content": "content_value",
                            "file_type": "file_type_value",
                        },
                    },
                    "source": "source_value",
                    "type_": "type__value",
                }
            ],
            "guest_accelerators": [
                {
                    "accelerator_count": 1805,
                    "accelerator_type": "accelerator_type_value",
                }
            ],
            "labels": {},
            "machine_type": "machine_type_value",
            "metadata": {
                "fingerprint": "fingerprint_value",
                "items": [{"key": "key_value", "value": "value_value"}],
                "kind": "kind_value",
            },
            "min_cpu_platform": "min_cpu_platform_value",
            "network_interfaces": [
                {
                    "access_configs": [
                        {
                            "external_ipv6": "external_ipv6_value",
                            "external_ipv6_prefix_length": 2837,
                            "kind": "kind_value",
                            "name": "name_value",
                            "nat_i_p": "nat_i_p_value",
                            "network_tier": "network_tier_value",
                            "public_ptr_domain_name": "public_ptr_domain_name_value",
                            "set_public_ptr": True,
                            "type_": "type__value",
                        }
                    ],
                    "alias_ip_ranges": [
                        {
                            "ip_cidr_range": "ip_cidr_range_value",
                            "subnetwork_range_name": "subnetwork_range_name_value",
                        }
                    ],
                    "fingerprint": "fingerprint_value",
                    "ipv6_access_configs": [
                        {
                            "external_ipv6": "external_ipv6_value",
                            "external_ipv6_prefix_length": 2837,
                            "kind": "kind_value",
                            "name": "name_value",
                            "nat_i_p": "nat_i_p_value",
                            "network_tier": "network_tier_value",
                            "public_ptr_domain_name": "public_ptr_domain_name_value",
                            "set_public_ptr": True,
                            "type_": "type__value",
                        }
                    ],
                    "ipv6_access_type": "ipv6_access_type_value",
                    "ipv6_address": "ipv6_address_value",
                    "kind": "kind_value",
                    "name": "name_value",
                    "network": "network_value",
                    "network_i_p": "network_i_p_value",
                    "nic_type": "nic_type_value",
                    "queue_count": 1197,
                    "stack_type": "stack_type_value",
                    "subnetwork": "subnetwork_value",
                }
            ],
            "private_ipv6_google_access": "private_ipv6_google_access_value",
            "reservation_affinity": {
                "consume_reservation_type": "consume_reservation_type_value",
                "key": "key_value",
                "values": ["values_value_1", "values_value_2"],
            },
            "resource_policies": [
                "resource_policies_value_1",
                "resource_policies_value_2",
            ],
            "scheduling": {
                "automatic_restart": True,
                "location_hint": "location_hint_value",
                "min_node_cpus": 1379,
                "node_affinities": [
                    {
                        "key": "key_value",
                        "operator": "operator_value",
                        "values": ["values_value_1", "values_value_2"],
                    }
                ],
                "on_host_maintenance": "on_host_maintenance_value",
                "preemptible": True,
            },
            "service_accounts": [
                {"email": "email_value", "scopes": ["scopes_value_1", "scopes_value_2"]}
            ],
            "shielded_instance_config": {
                "enable_integrity_monitoring": True,
                "enable_secure_boot": True,
                "enable_vtpm": True,
            },
            "tags": {
                "fingerprint": "fingerprint_value",
                "items": ["items_value_1", "items_value_2"],
            },
        },
        "location_policy": {"locations": {}},
        "min_count": 972,
        "name_pattern": "name_pattern_value",
        "per_instance_properties": {},
        "source_instance_template": "source_instance_template_value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.bulk_insert_unary(request)
def test_bulk_insert_unary_rest_flattened():
    """bulk_insert_unary called with flattened args hits the bulkInsert URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Intercept the session-level HTTP call and fake a success response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response

        # Truthy flattened fields, overridden with path params that satisfy
        # the http rule for this method.
        call_kwargs = dict(
            project="project_value",
            zone="zone_value",
            bulk_insert_instance_resource_resource=compute.BulkInsertInstanceResource(
                count=553
            ),
        )
        call_kwargs.update({"project": "sample1", "zone": "sample2"})
        client.bulk_insert_unary(**call_kwargs)

    # Exactly one HTTP call was made, aimed at the expected URL.
    assert len(req.mock_calls) == 1
    _, call_args, _ = req.mock_calls[0]
    assert path_template.validate(
        "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/bulkInsert"
        % client.transport._host,
        call_args[1],
    )
def test_bulk_insert_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    resource = compute.BulkInsertInstanceResource(count=553)
    with pytest.raises(ValueError):
        client.bulk_insert_unary(
            compute.BulkInsertInstanceRequest(),
            project="project_value",
            zone="zone_value",
            bulk_insert_instance_resource_resource=resource,
        )
def test_bulk_insert_unary_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    _ = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize("request_type", [compute.DeleteInstanceRequest, dict,])
def test_delete_unary_rest(request_type):
    """delete_unary deserializes the REST payload into a compute.Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # A request that satisfies transcoding for this method's http rule.
    request = request_type(
        {"project": "sample1", "zone": "sampleple2", "instance": "sample3"}
    )

    # Every field expected to round-trip through the JSON payload.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Intercept the session-level HTTP call and fake a success response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation(**expected)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.delete_unary(request)

    # The client must return an Operation carrying every expected field.
    assert isinstance(response, compute.Operation)
    for field, want in expected.items():
        assert getattr(response, field) == want
def test_delete_unary_rest_required_fields(request_type=compute.DeleteInstanceRequest):
    """Verify required-field handling for delete: defaults are dropped from the
    serialized request, re-populated values survive, and none of the required
    path params leak into the HTTP query params.
    """
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).delete._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).delete._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    # Only the optional query param request_id may still be reported unset.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "delete",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.delete_unary(request)

            # delete has no required query params, so none may be sent.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_delete_unary_rest_unset_required_fields():
    """The unset-required-fields for delete is the intersection of its
    optional query params and its required fields.
    """
    transport = transports.InstancesRestTransport(
        # Fix: instantiate the credentials; the original passed the class
        # object itself (missing ``()``), unlike every other construction
        # in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.delete._get_unset_required_fields({})
    # requestId is optional-only and instance/project/zone are path params,
    # so the intersection is empty.
    assert set(unset_fields) == (
        set(("requestId",)) & set(("instance", "project", "zone",))
    )
def test_delete_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.DeleteInstanceRequest
):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # A request that satisfies transcoding for this method's http rule.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Fake a 400 response from the session layer.
    bad_response = Response()
    bad_response.status_code = 400
    bad_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.delete_unary(request)
def test_delete_unary_rest_flattened():
    """delete_unary called with flattened args hits the instance URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Intercept the session-level HTTP call and fake a success response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response

        # Truthy flattened fields, overridden with path params that satisfy
        # the http rule for this method.
        call_kwargs = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        call_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.delete_unary(**call_kwargs)

    # Exactly one HTTP call was made, aimed at the expected URL.
    assert len(req.mock_calls) == 1
    _, call_args, _ = req.mock_calls[0]
    assert path_template.validate(
        "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}"
        % client.transport._host,
        call_args[1],
    )
def test_delete_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    with pytest.raises(ValueError):
        client.delete_unary(
            compute.DeleteInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_delete_unary_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    _ = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize(
    "request_type", [compute.DeleteAccessConfigInstanceRequest, dict,]
)
def test_delete_access_config_unary_rest(request_type):
    """delete_access_config_unary deserializes the payload into an Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # A request that satisfies transcoding for this method's http rule.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Every field expected to round-trip through the JSON payload.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Intercept the session-level HTTP call and fake a success response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation(**expected)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.delete_access_config_unary(request)

    # The client must return an Operation carrying every expected field.
    assert isinstance(response, compute.Operation)
    for field, want in expected.items():
        assert getattr(response, field) == want
def test_delete_access_config_unary_rest_required_fields(
    request_type=compute.DeleteAccessConfigInstanceRequest,
):
    """Verify required-field handling for deleteAccessConfig: default-valued
    fields are dropped from the serialized request, re-added by the transport,
    and the required query params (accessConfig, networkInterface) are sent.
    """
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["access_config"] = ""
    request_init["instance"] = ""
    request_init["network_interface"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    assert "accessConfig" not in jsonified_request
    assert "networkInterface" not in jsonified_request

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).delete_access_config._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    assert "accessConfig" in jsonified_request
    assert jsonified_request["accessConfig"] == request_init["access_config"]
    assert "networkInterface" in jsonified_request
    assert jsonified_request["networkInterface"] == request_init["network_interface"]

    jsonified_request["accessConfig"] = "access_config_value"
    jsonified_request["instance"] = "instance_value"
    jsonified_request["networkInterface"] = "network_interface_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).delete_access_config._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(
        ("access_config", "request_id", "network_interface",)
    )
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "accessConfig" in jsonified_request
    assert jsonified_request["accessConfig"] == "access_config_value"
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "networkInterface" in jsonified_request
    assert jsonified_request["networkInterface"] == "network_interface_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.delete_access_config_unary(request)

            # The two required query params must be sent (still default "").
            expected_params = [
                ("accessConfig", "",),
                ("networkInterface", "",),
            ]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_delete_access_config_unary_rest_unset_required_fields():
    """The unset-required-fields for deleteAccessConfig is the intersection
    of its query params and its required fields.
    """
    transport = transports.InstancesRestTransport(
        # Fix: instantiate the credentials; the original passed the class
        # object itself (missing ``()``), unlike every other construction
        # in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.delete_access_config._get_unset_required_fields({})
    # accessConfig and networkInterface are both query params and required.
    assert set(unset_fields) == (
        set(("accessConfig", "requestId", "networkInterface",))
        & set(("accessConfig", "instance", "networkInterface", "project", "zone",))
    )
def test_delete_access_config_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.DeleteAccessConfigInstanceRequest
):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # A request that satisfies transcoding for this method's http rule.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Fake a 400 response from the session layer.
    bad_response = Response()
    bad_response.status_code = 400
    bad_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.delete_access_config_unary(request)
def test_delete_access_config_unary_rest_flattened():
    """delete_access_config_unary with flattened args hits the expected URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Intercept the session-level HTTP call and fake a success response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response

        # Truthy flattened fields, overridden with path params that satisfy
        # the http rule for this method.
        call_kwargs = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            access_config="access_config_value",
            network_interface="network_interface_value",
        )
        call_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.delete_access_config_unary(**call_kwargs)

    # Exactly one HTTP call was made, aimed at the expected URL.
    assert len(req.mock_calls) == 1
    _, call_args, _ = req.mock_calls[0]
    assert path_template.validate(
        "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/deleteAccessConfig"
        % client.transport._host,
        call_args[1],
    )
def test_delete_access_config_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    with pytest.raises(ValueError):
        client.delete_access_config_unary(
            compute.DeleteAccessConfigInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            access_config="access_config_value",
            network_interface="network_interface_value",
        )
def test_delete_access_config_unary_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    _ = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize("request_type", [compute.DetachDiskInstanceRequest, dict,])
def test_detach_disk_unary_rest(request_type):
    """detach_disk_unary deserializes the REST payload into an Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # A request that satisfies transcoding for this method's http rule.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Every field expected to round-trip through the JSON payload.
    expected = dict(
        client_operation_id="client_operation_id_value",
        creation_timestamp="creation_timestamp_value",
        description="description_value",
        end_time="end_time_value",
        http_error_message="http_error_message_value",
        http_error_status_code=2374,
        id=205,
        insert_time="insert_time_value",
        kind="kind_value",
        name="name_value",
        operation_group_id="operation_group_id_value",
        operation_type="operation_type_value",
        progress=885,
        region="region_value",
        self_link="self_link_value",
        start_time="start_time_value",
        status=compute.Operation.Status.DONE,
        status_message="status_message_value",
        target_id=947,
        target_link="target_link_value",
        user="user_value",
        zone="zone_value",
    )

    # Intercept the session-level HTTP call and fake a success response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation(**expected)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.detach_disk_unary(request)

    # The client must return an Operation carrying every expected field.
    assert isinstance(response, compute.Operation)
    for field, want in expected.items():
        assert getattr(response, field) == want
def test_detach_disk_unary_rest_required_fields(
    request_type=compute.DetachDiskInstanceRequest,
):
    """Verify required-field handling for detachDisk: default-valued fields are
    dropped from the serialized request, re-added by the transport, and the
    required query param deviceName is sent.
    """
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["device_name"] = ""
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    assert "deviceName" not in jsonified_request

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).detach_disk._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    assert "deviceName" in jsonified_request
    assert jsonified_request["deviceName"] == request_init["device_name"]

    jsonified_request["deviceName"] = "device_name_value"
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).detach_disk._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("device_name", "request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "deviceName" in jsonified_request
    assert jsonified_request["deviceName"] == "device_name_value"
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.detach_disk_unary(request)

            # The required query param must be sent (still default "").
            expected_params = [
                ("deviceName", "",),
            ]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_detach_disk_unary_rest_unset_required_fields():
    """The unset-required-fields for detachDisk is the intersection of its
    query params and its required fields.
    """
    transport = transports.InstancesRestTransport(
        # Fix: instantiate the credentials; the original passed the class
        # object itself (missing ``()``), unlike every other construction
        # in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.detach_disk._get_unset_required_fields({})
    # deviceName is both a query param and a required field.
    assert set(unset_fields) == (
        set(("deviceName", "requestId",))
        & set(("deviceName", "instance", "project", "zone",))
    )
def test_detach_disk_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.DetachDiskInstanceRequest
):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # A request that satisfies transcoding for this method's http rule.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Fake a 400 response from the session layer.
    bad_response = Response()
    bad_response.status_code = 400
    bad_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = bad_response
        with pytest.raises(core_exceptions.BadRequest):
            client.detach_disk_unary(request)
def test_detach_disk_unary_rest_flattened():
    """detach_disk_unary with flattened args hits the detachDisk URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Intercept the session-level HTTP call and fake a success response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = fake_response

        # Truthy flattened fields, overridden with path params that satisfy
        # the http rule for this method.
        call_kwargs = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            device_name="device_name_value",
        )
        call_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.detach_disk_unary(**call_kwargs)

    # Exactly one HTTP call was made, aimed at the expected URL.
    assert len(req.mock_calls) == 1
    _, call_args, _ = req.mock_calls[0]
    assert path_template.validate(
        "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/detachDisk"
        % client.transport._host,
        call_args[1],
    )
def test_detach_disk_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    with pytest.raises(ValueError):
        client.detach_disk_unary(
            compute.DetachDiskInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            device_name="device_name_value",
        )
def test_detach_disk_unary_rest_error():
    """Smoke test: constructing a REST-transport client must not raise."""
    _ = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize("request_type", [compute.GetInstanceRequest, dict,])
def test_get_rest(request_type):
    """get deserializes the REST payload into a compute.Instance."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # A request that satisfies transcoding for this method's http rule.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Every field expected to round-trip through the JSON payload.
    expected = dict(
        can_ip_forward=True,
        cpu_platform="cpu_platform_value",
        creation_timestamp="creation_timestamp_value",
        deletion_protection=True,
        description="description_value",
        fingerprint="fingerprint_value",
        hostname="hostname_value",
        id=205,
        kind="kind_value",
        label_fingerprint="label_fingerprint_value",
        last_start_timestamp="last_start_timestamp_value",
        last_stop_timestamp="last_stop_timestamp_value",
        last_suspended_timestamp="last_suspended_timestamp_value",
        machine_type="machine_type_value",
        min_cpu_platform="min_cpu_platform_value",
        name="name_value",
        private_ipv6_google_access="private_ipv6_google_access_value",
        resource_policies=["resource_policies_value"],
        satisfies_pzs=True,
        self_link="self_link_value",
        start_restricted=True,
        status="status_value",
        status_message="status_message_value",
        zone="zone_value",
    )

    # Intercept the session-level HTTP call and fake a success response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Instance.to_json(
            compute.Instance(**expected)
        ).encode("UTF-8")
        req.return_value = fake_response
        response = client.get(request)

    # The client must return an Instance carrying every expected field.
    assert isinstance(response, compute.Instance)
    for field, want in expected.items():
        got = getattr(response, field)
        if isinstance(want, bool):
            # Preserve the original identity check for boolean fields.
            assert got is want
        else:
            assert got == want
def test_get_rest_required_fields(request_type=compute.GetInstanceRequest):
    """Verify required-field handling for the REST ``get`` method.

    Required fields (``instance``, ``project``, ``zone``) left at their
    default values are dropped from the serialized request, re-added by
    ``_get_unset_required_fields``, and non-default values are preserved.
    """
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Instance()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Instance.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.get(request)

            # ``get`` has no required query-only parameters, so nothing
            # beyond the transcoded fields should be sent as params.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_get_rest_unset_required_fields():
    """Verify which required fields of ``get`` are reported as unset.

    ``get`` has no query/body-only required fields, hence the empty set.
    """
    transport = transports.InstancesRestTransport(
        # Fix: pass a credentials *instance*, consistent with every other
        # transport/client construction in this file (the class object
        # itself was being passed before).
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.get._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("instance", "project", "zone",)))
def test_get_rest_bad_request(
    transport: str = "rest", request_type=compute.GetInstanceRequest
):
    """An HTTP 400 from the server must surface as ``BadRequest``."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request that transcodes successfully.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Fake a 400 response and confirm the client raises.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.get(request)
def test_get_rest_flattened():
    """Flattened arguments must be folded into the request URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    with mock.patch.object(type(client.transport._session), "request") as req:
        # Serve back an empty Instance with HTTP 200.
        payload = compute.Instance()
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Instance.to_json(payload).encode("UTF-8")
        req.return_value = http_response

        # Arguments that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.get(**mock_args)

        # Exactly one HTTP call, whose URL matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}"
            % client.transport._host,
            args[1],
        )
def test_get_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    flattened = dict(
        project="project_value", zone="zone_value", instance="instance_value",
    )
    with pytest.raises(ValueError):
        client.get(compute.GetInstanceRequest(), **flattened)
def test_get_rest_error():
    """Smoke test: a REST-transport client can be constructed."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.GetEffectiveFirewallsInstanceRequest, dict,]
)
def test_get_effective_firewalls_rest(request_type):
    """Round-trip ``get_effective_firewalls`` through a mocked session."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # A request that satisfies transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    with mock.patch.object(type(client.transport._session), "request") as req:
        # Serve back an empty response message with HTTP 200.
        payload = compute.InstancesGetEffectiveFirewallsResponse()
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.InstancesGetEffectiveFirewallsResponse.to_json(
            payload
        ).encode("UTF-8")
        req.return_value = http_response

        response = client.get_effective_firewalls(request)

        # The deserialized response has the expected type.
        assert isinstance(response, compute.InstancesGetEffectiveFirewallsResponse)
def test_get_effective_firewalls_rest_required_fields(
    request_type=compute.GetEffectiveFirewallsInstanceRequest,
):
    """Verify required-field handling for ``get_effective_firewalls``.

    Unlike the path fields, ``networkInterface`` is also sent as a query
    parameter, so it must be re-added by ``_get_unset_required_fields``
    and appear in the request params even when left at its default value.
    """
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["instance"] = ""
    request_init["network_interface"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    assert "networkInterface" not in jsonified_request

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_effective_firewalls._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    assert "networkInterface" in jsonified_request
    assert jsonified_request["networkInterface"] == request_init["network_interface"]

    jsonified_request["instance"] = "instance_value"
    jsonified_request["networkInterface"] = "network_interface_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_effective_firewalls._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("network_interface",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "networkInterface" in jsonified_request
    assert jsonified_request["networkInterface"] == "network_interface_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.InstancesGetEffectiveFirewallsResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.InstancesGetEffectiveFirewallsResponse.to_json(
                return_value
            )
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.get_effective_firewalls(request)

            # networkInterface defaulted to "" must still be sent as a param.
            expected_params = [
                ("networkInterface", "",),
            ]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_get_effective_firewalls_rest_unset_required_fields():
    """Verify which required fields of ``get_effective_firewalls`` are unset.

    The result is the intersection of the query-only required parameters
    with the full set of required fields.
    """
    transport = transports.InstancesRestTransport(
        # Fix: pass a credentials *instance*, consistent with every other
        # transport/client construction in this file (the class object
        # itself was being passed before).
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.get_effective_firewalls._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("networkInterface",))
        & set(("instance", "networkInterface", "project", "zone",))
    )
def test_get_effective_firewalls_rest_bad_request(
    transport: str = "rest", request_type=compute.GetEffectiveFirewallsInstanceRequest
):
    """An HTTP 400 from the server must surface as ``BadRequest``."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request that transcodes successfully.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Fake a 400 response and confirm the client raises.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.get_effective_firewalls(request)
def test_get_effective_firewalls_rest_flattened():
    """Flattened arguments must be folded into the request URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    with mock.patch.object(type(client.transport._session), "request") as req:
        # Serve back an empty response message with HTTP 200.
        payload = compute.InstancesGetEffectiveFirewallsResponse()
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.InstancesGetEffectiveFirewallsResponse.to_json(
            payload
        ).encode("UTF-8")
        req.return_value = http_response

        # Arguments that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            network_interface="network_interface_value",
        )
        mock_args.update(sample_request)
        client.get_effective_firewalls(**mock_args)

        # Exactly one HTTP call, whose URL matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getEffectiveFirewalls"
            % client.transport._host,
            args[1],
        )
def test_get_effective_firewalls_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    flattened = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        network_interface="network_interface_value",
    )
    with pytest.raises(ValueError):
        client.get_effective_firewalls(
            compute.GetEffectiveFirewallsInstanceRequest(), **flattened
        )
def test_get_effective_firewalls_rest_error():
    """Smoke test: a REST-transport client can be constructed."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.GetGuestAttributesInstanceRequest, dict,]
)
def test_get_guest_attributes_rest(request_type):
    """Round-trip ``get_guest_attributes`` through a mocked session."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # A request that satisfies transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    with mock.patch.object(type(client.transport._session), "request") as req:
        # Populate every scalar field so deserialization can be checked.
        payload = compute.GuestAttributes(
            kind="kind_value",
            query_path="query_path_value",
            self_link="self_link_value",
            variable_key="variable_key_value",
            variable_value="variable_value_value",
        )
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.GuestAttributes.to_json(payload).encode(
            "UTF-8"
        )
        req.return_value = http_response

        response = client.get_guest_attributes(request)

        # Every field must survive the JSON round trip.
        assert isinstance(response, compute.GuestAttributes)
        assert response.kind == "kind_value"
        assert response.query_path == "query_path_value"
        assert response.self_link == "self_link_value"
        assert response.variable_key == "variable_key_value"
        assert response.variable_value == "variable_value_value"
def test_get_guest_attributes_rest_required_fields(
    request_type=compute.GetGuestAttributesInstanceRequest,
):
    """Verify required-field handling for ``get_guest_attributes``.

    Required fields (``instance``, ``project``, ``zone``) left at their
    default values are dropped from the serialized request, re-added by
    ``_get_unset_required_fields``, and non-default values are preserved.
    """
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_guest_attributes._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_guest_attributes._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("variable_key", "query_path",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.GuestAttributes()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.GuestAttributes.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.get_guest_attributes(request)

            # No required query-only parameters should be sent as params.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_get_guest_attributes_rest_unset_required_fields():
    """Verify which required fields of ``get_guest_attributes`` are unset.

    The result is the intersection of the optional query parameters with
    the required fields — empty here, since the optional params are not
    required fields of this method.
    """
    transport = transports.InstancesRestTransport(
        # Fix: pass a credentials *instance*, consistent with every other
        # transport/client construction in this file (the class object
        # itself was being passed before).
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.get_guest_attributes._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("variableKey", "queryPath",)) & set(("instance", "project", "zone",))
    )
def test_get_guest_attributes_rest_bad_request(
    transport: str = "rest", request_type=compute.GetGuestAttributesInstanceRequest
):
    """An HTTP 400 from the server must surface as ``BadRequest``."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request that transcodes successfully.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Fake a 400 response and confirm the client raises.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.get_guest_attributes(request)
def test_get_guest_attributes_rest_flattened():
    """Flattened arguments must be folded into the request URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    with mock.patch.object(type(client.transport._session), "request") as req:
        # Serve back an empty GuestAttributes with HTTP 200.
        payload = compute.GuestAttributes()
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.GuestAttributes.to_json(payload).encode(
            "UTF-8"
        )
        req.return_value = http_response

        # Arguments that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.get_guest_attributes(**mock_args)

        # Exactly one HTTP call, whose URL matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getGuestAttributes"
            % client.transport._host,
            args[1],
        )
def test_get_guest_attributes_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    flattened = dict(
        project="project_value", zone="zone_value", instance="instance_value",
    )
    with pytest.raises(ValueError):
        client.get_guest_attributes(
            compute.GetGuestAttributesInstanceRequest(), **flattened
        )
def test_get_guest_attributes_rest_error():
    """Smoke test: a REST-transport client can be constructed."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize("request_type", [compute.GetIamPolicyInstanceRequest, dict,])
def test_get_iam_policy_rest(request_type):
    """Round-trip ``get_iam_policy`` through a mocked session."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # A request that satisfies transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    )

    with mock.patch.object(type(client.transport._session), "request") as req:
        # Populate the scalar fields so deserialization can be checked.
        payload = compute.Policy(etag="etag_value", iam_owned=True, version=774,)
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Policy.to_json(payload).encode("UTF-8")
        req.return_value = http_response

        response = client.get_iam_policy(request)

        # Every field must survive the JSON round trip.
        assert isinstance(response, compute.Policy)
        assert response.etag == "etag_value"
        assert response.iam_owned is True
        assert response.version == 774
def test_get_iam_policy_rest_required_fields(
    request_type=compute.GetIamPolicyInstanceRequest,
):
    """Verify required-field handling for ``get_iam_policy``.

    Required fields (``project``, ``resource``, ``zone``) left at their
    default values are dropped from the serialized request, re-added by
    ``_get_unset_required_fields``, and non-default values are preserved.
    """
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["resource"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_iam_policy._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["project"] = "project_value"
    jsonified_request["resource"] = "resource_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_iam_policy._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("options_requested_policy_version",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "resource" in jsonified_request
    assert jsonified_request["resource"] == "resource_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Policy()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Policy.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.get_iam_policy(request)

            # No required query-only parameters should be sent as params.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_get_iam_policy_rest_unset_required_fields():
    """Verify which required fields of ``get_iam_policy`` are unset.

    The optional ``optionsRequestedPolicyVersion`` query parameter is not
    one of the required fields, so the intersection is empty.
    """
    transport = transports.InstancesRestTransport(
        # Fix: pass a credentials *instance*, consistent with every other
        # transport/client construction in this file (the class object
        # itself was being passed before).
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.get_iam_policy._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("optionsRequestedPolicyVersion",)) & set(("project", "resource", "zone",))
    )
def test_get_iam_policy_rest_bad_request(
    transport: str = "rest", request_type=compute.GetIamPolicyInstanceRequest
):
    """An HTTP 400 from the server must surface as ``BadRequest``."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request that transcodes successfully.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    )

    # Fake a 400 response and confirm the client raises.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.get_iam_policy(request)
def test_get_iam_policy_rest_flattened():
    """Flattened arguments must be folded into the request URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    with mock.patch.object(type(client.transport._session), "request") as req:
        # Serve back an empty Policy with HTTP 200.
        payload = compute.Policy()
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Policy.to_json(payload).encode("UTF-8")
        req.return_value = http_response

        # Arguments that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "resource": "sample3",
        }
        mock_args = dict(
            project="project_value", zone="zone_value", resource="resource_value",
        )
        mock_args.update(sample_request)
        client.get_iam_policy(**mock_args)

        # Exactly one HTTP call, whose URL matches the expected template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/getIamPolicy"
            % client.transport._host,
            args[1],
        )
def test_get_iam_policy_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    flattened = dict(
        project="project_value", zone="zone_value", resource="resource_value",
    )
    with pytest.raises(ValueError):
        client.get_iam_policy(compute.GetIamPolicyInstanceRequest(), **flattened)
def test_get_iam_policy_rest_error():
    """Smoke test: a REST-transport client can be constructed."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize("request_type", [compute.GetScreenshotInstanceRequest, dict,])
def test_get_screenshot_rest(request_type):
    """Round-trip ``get_screenshot`` through a mocked session."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # A request that satisfies transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    with mock.patch.object(type(client.transport._session), "request") as req:
        # Populate the scalar fields so deserialization can be checked.
        payload = compute.Screenshot(contents="contents_value", kind="kind_value",)
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Screenshot.to_json(payload).encode("UTF-8")
        req.return_value = http_response

        response = client.get_screenshot(request)

        # Every field must survive the JSON round trip.
        assert isinstance(response, compute.Screenshot)
        assert response.contents == "contents_value"
        assert response.kind == "kind_value"
def test_get_screenshot_rest_required_fields(
    request_type=compute.GetScreenshotInstanceRequest,
):
    """Verify required-field handling for ``get_screenshot``.

    Required fields (``instance``, ``project``, ``zone``) left at their
    default values are dropped from the serialized request, re-added by
    ``_get_unset_required_fields``, and non-default values are preserved.
    """
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_screenshot._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_screenshot._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Screenshot()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Screenshot.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.get_screenshot(request)

            # No required query-only parameters should be sent as params.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_get_screenshot_rest_unset_required_fields():
    """Verify which required fields of ``get_screenshot`` are unset.

    ``get_screenshot`` has no query/body-only required fields, hence the
    empty set.
    """
    transport = transports.InstancesRestTransport(
        # Fix: pass a credentials *instance*, consistent with every other
        # transport/client construction in this file (the class object
        # itself was being passed before).
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.get_screenshot._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("instance", "project", "zone",)))
def test_get_screenshot_rest_bad_request(
    transport: str = "rest", request_type=compute.GetScreenshotInstanceRequest
):
    """An HTTP 400 from the server must surface as ``BadRequest``."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request that transcodes successfully.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Fake a 400 response and confirm the client raises.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.get_screenshot(request)
def test_get_screenshot_rest_flattened():
    """Flattened (keyword) args are accepted and expanded onto the expected URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Screenshot()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Screenshot.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }

        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.get_screenshot(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values. args[1] is the URI positional argument of
        # Session.request; validate() checks it against the URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/screenshot"
            % client.transport._host,
            args[1],
        )
def test_get_screenshot_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields raises ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    flattened_kwargs = dict(
        project="project_value", zone="zone_value", instance="instance_value"
    )

    # Supplying both a request object and flattened kwargs is invalid.
    with pytest.raises(ValueError):
        client.get_screenshot(
            compute.GetScreenshotInstanceRequest(), **flattened_kwargs
        )
def test_get_screenshot_rest_error():
    """Constructing a REST-transport client must not raise."""
    InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize(
    "request_type", [compute.GetSerialPortOutputInstanceRequest, dict,]
)
def test_get_serial_port_output_rest(request_type):
    """get_serial_port_output deserializes the REST response into SerialPortOutput."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.SerialPortOutput(
            contents="contents_value",
            kind="kind_value",
            next_=542,
            self_link="self_link_value",
            start=558,
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.SerialPortOutput.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.get_serial_port_output(request)

    # Establish that the response is the type that we expect, with every
    # canned field round-tripped through JSON intact.
    assert isinstance(response, compute.SerialPortOutput)
    assert response.contents == "contents_value"
    assert response.kind == "kind_value"
    assert response.next_ == 542
    assert response.self_link == "self_link_value"
    assert response.start == 558
def test_get_serial_port_output_rest_required_fields(
    request_type=compute.GetSerialPortOutputInstanceRequest,
):
    """Required-field handling for get_serial_port_output.

    Exercises ``_get_unset_required_fields`` twice — once to confirm
    default-valued required fields are dropped from the JSON request, and
    once to confirm explicitly-set values survive — then issues the call
    with transcoding mocked out and checks no stray query params are sent.
    """
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_serial_port_output._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_serial_port_output._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    # "port" and "start" are this method's optional query parameters.
    assert not set(unset_fields) - set(("port", "start",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.SerialPortOutput()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.SerialPortOutput.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.get_serial_port_output(request)

            # Empty required fields must not leak into the query string.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_get_serial_port_output_rest_unset_required_fields():
    """Verify the unset-required-fields set for get_serial_port_output.

    The query-string required fields (``port``, ``start``) intersected with
    the path parameters (``instance``, ``project``, ``zone``) must be empty.
    """
    transport = transports.InstancesRestTransport(
        # Fix: instantiate the credentials; the original passed the
        # AnonymousCredentials *class*, inconsistent with every other
        # test in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.get_serial_port_output._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("port", "start",)) & set(("instance", "project", "zone",))
    )
def test_get_serial_port_output_rest_bad_request(
    transport: str = "rest", request_type=compute.GetSerialPortOutputInstanceRequest
):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Path params that transcode successfully, so we reach the HTTP layer.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Have the mocked HTTP session hand back a 400 response.
    with mock.patch.object(Session, "request") as mocked_request:
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        mocked_request.return_value = bad_response

        with pytest.raises(core_exceptions.BadRequest):
            client.get_serial_port_output(request)
def test_get_serial_port_output_rest_flattened():
    """Flattened (keyword) args are accepted and expanded onto the expected URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.SerialPortOutput()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.SerialPortOutput.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }

        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.get_serial_port_output(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values. args[1] is the URI positional argument of
        # Session.request; validate() checks it against the URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/serialPort"
            % client.transport._host,
            args[1],
        )
def test_get_serial_port_output_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields raises ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    flattened_kwargs = dict(
        project="project_value", zone="zone_value", instance="instance_value"
    )

    # Supplying both a request object and flattened kwargs is invalid.
    with pytest.raises(ValueError):
        client.get_serial_port_output(
            compute.GetSerialPortOutputInstanceRequest(), **flattened_kwargs
        )
def test_get_serial_port_output_rest_error():
    """Constructing a REST-transport client must not raise."""
    InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize(
    "request_type", [compute.GetShieldedInstanceIdentityInstanceRequest, dict,]
)
def test_get_shielded_instance_identity_rest(request_type):
    """get_shielded_instance_identity deserializes the REST response correctly."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Path params that satisfy transcoding.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Patch the underlying HTTP session and return a canned 200 response.
    with mock.patch.object(type(client.transport._session), "request") as mocked:
        expected = compute.ShieldedInstanceIdentity(kind="kind_value",)

        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.ShieldedInstanceIdentity.to_json(
            expected
        ).encode("UTF-8")
        mocked.return_value = http_response

        result = client.get_shielded_instance_identity(request)

    # The body round-trips into the expected message type.
    assert isinstance(result, compute.ShieldedInstanceIdentity)
    assert result.kind == "kind_value"
def test_get_shielded_instance_identity_rest_required_fields(
    request_type=compute.GetShieldedInstanceIdentityInstanceRequest,
):
    """Required-field handling for get_shielded_instance_identity.

    Exercises ``_get_unset_required_fields`` twice — once to confirm
    default-valued required fields are dropped from the JSON request, and
    once to confirm explicitly-set values survive — then issues the call
    with transcoding mocked out and checks no stray query params are sent.
    """
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_shielded_instance_identity._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).get_shielded_instance_identity._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.ShieldedInstanceIdentity()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.ShieldedInstanceIdentity.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.get_shielded_instance_identity(request)

            # Empty required fields must not leak into the query string.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_get_shielded_instance_identity_rest_unset_required_fields():
    """Verify the unset-required-fields set for get_shielded_instance_identity.

    This method has no query-string required fields, so the intersection
    with the path parameters (``instance``, ``project``, ``zone``) is empty.
    """
    transport = transports.InstancesRestTransport(
        # Fix: instantiate the credentials; the original passed the
        # AnonymousCredentials *class*, inconsistent with every other
        # test in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.get_shielded_instance_identity._get_unset_required_fields(
        {}
    )
    assert set(unset_fields) == (set(()) & set(("instance", "project", "zone",)))
def test_get_shielded_instance_identity_rest_bad_request(
    transport: str = "rest",
    request_type=compute.GetShieldedInstanceIdentityInstanceRequest,
):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Path params that transcode successfully, so we reach the HTTP layer.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Have the mocked HTTP session hand back a 400 response.
    with mock.patch.object(Session, "request") as mocked_request:
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        mocked_request.return_value = bad_response

        with pytest.raises(core_exceptions.BadRequest):
            client.get_shielded_instance_identity(request)
def test_get_shielded_instance_identity_rest_flattened():
    """Flattened (keyword) args are accepted and expanded onto the expected URI."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.ShieldedInstanceIdentity()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.ShieldedInstanceIdentity.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }

        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.get_shielded_instance_identity(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values. args[1] is the URI positional argument of
        # Session.request; validate() checks it against the URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getShieldedInstanceIdentity"
            % client.transport._host,
            args[1],
        )
def test_get_shielded_instance_identity_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields raises ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    flattened_kwargs = dict(
        project="project_value", zone="zone_value", instance="instance_value"
    )

    # Supplying both a request object and flattened kwargs is invalid.
    with pytest.raises(ValueError):
        client.get_shielded_instance_identity(
            compute.GetShieldedInstanceIdentityInstanceRequest(), **flattened_kwargs
        )
def test_get_shielded_instance_identity_rest_error():
    """Constructing a REST-transport client must not raise."""
    InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize("request_type", [compute.InsertInstanceRequest, dict,])
def test_insert_unary_rest(request_type):
    """insert_unary sends a fully-populated Instance body and parses the Operation.

    The request body below fills every field of ``Instance`` (including
    nested disks, network interfaces, scheduling, etc.) so that JSON
    serialization of the full message shape is exercised.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2"}
    request_init["instance_resource"] = {
        "advanced_machine_features": {
            "enable_nested_virtualization": True,
            "threads_per_core": 1689,
        },
        "can_ip_forward": True,
        "confidential_instance_config": {"enable_confidential_compute": True},
        "cpu_platform": "cpu_platform_value",
        "creation_timestamp": "creation_timestamp_value",
        "deletion_protection": True,
        "description": "description_value",
        "disks": [
            {
                "auto_delete": True,
                "boot": True,
                "device_name": "device_name_value",
                "disk_encryption_key": {
                    "kms_key_name": "kms_key_name_value",
                    "kms_key_service_account": "kms_key_service_account_value",
                    "raw_key": "raw_key_value",
                    "rsa_encrypted_key": "rsa_encrypted_key_value",
                    "sha256": "sha256_value",
                },
                "disk_size_gb": 1261,
                "guest_os_features": [{"type_": "type__value"}],
                "index": 536,
                "initialize_params": {
                    "description": "description_value",
                    "disk_name": "disk_name_value",
                    "disk_size_gb": 1261,
                    "disk_type": "disk_type_value",
                    "labels": {},
                    "on_update_action": "on_update_action_value",
                    "provisioned_iops": 1740,
                    "resource_policies": [
                        "resource_policies_value_1",
                        "resource_policies_value_2",
                    ],
                    "source_image": "source_image_value",
                    "source_image_encryption_key": {
                        "kms_key_name": "kms_key_name_value",
                        "kms_key_service_account": "kms_key_service_account_value",
                        "raw_key": "raw_key_value",
                        "rsa_encrypted_key": "rsa_encrypted_key_value",
                        "sha256": "sha256_value",
                    },
                    "source_snapshot": "source_snapshot_value",
                    "source_snapshot_encryption_key": {
                        "kms_key_name": "kms_key_name_value",
                        "kms_key_service_account": "kms_key_service_account_value",
                        "raw_key": "raw_key_value",
                        "rsa_encrypted_key": "rsa_encrypted_key_value",
                        "sha256": "sha256_value",
                    },
                },
                "interface": "interface_value",
                "kind": "kind_value",
                "licenses": ["licenses_value_1", "licenses_value_2"],
                "mode": "mode_value",
                "shielded_instance_initial_state": {
                    "dbs": [
                        {"content": "content_value", "file_type": "file_type_value"}
                    ],
                    "dbxs": [
                        {"content": "content_value", "file_type": "file_type_value"}
                    ],
                    "keks": [
                        {"content": "content_value", "file_type": "file_type_value"}
                    ],
                    "pk": {"content": "content_value", "file_type": "file_type_value"},
                },
                "source": "source_value",
                "type_": "type__value",
            }
        ],
        "display_device": {"enable_display": True},
        "fingerprint": "fingerprint_value",
        "guest_accelerators": [
            {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"}
        ],
        "hostname": "hostname_value",
        "id": 205,
        "kind": "kind_value",
        "label_fingerprint": "label_fingerprint_value",
        "labels": {},
        "last_start_timestamp": "last_start_timestamp_value",
        "last_stop_timestamp": "last_stop_timestamp_value",
        "last_suspended_timestamp": "last_suspended_timestamp_value",
        "machine_type": "machine_type_value",
        "metadata": {
            "fingerprint": "fingerprint_value",
            "items": [{"key": "key_value", "value": "value_value"}],
            "kind": "kind_value",
        },
        "min_cpu_platform": "min_cpu_platform_value",
        "name": "name_value",
        "network_interfaces": [
            {
                "access_configs": [
                    {
                        "external_ipv6": "external_ipv6_value",
                        "external_ipv6_prefix_length": 2837,
                        "kind": "kind_value",
                        "name": "name_value",
                        "nat_i_p": "nat_i_p_value",
                        "network_tier": "network_tier_value",
                        "public_ptr_domain_name": "public_ptr_domain_name_value",
                        "set_public_ptr": True,
                        "type_": "type__value",
                    }
                ],
                "alias_ip_ranges": [
                    {
                        "ip_cidr_range": "ip_cidr_range_value",
                        "subnetwork_range_name": "subnetwork_range_name_value",
                    }
                ],
                "fingerprint": "fingerprint_value",
                "ipv6_access_configs": [
                    {
                        "external_ipv6": "external_ipv6_value",
                        "external_ipv6_prefix_length": 2837,
                        "kind": "kind_value",
                        "name": "name_value",
                        "nat_i_p": "nat_i_p_value",
                        "network_tier": "network_tier_value",
                        "public_ptr_domain_name": "public_ptr_domain_name_value",
                        "set_public_ptr": True,
                        "type_": "type__value",
                    }
                ],
                "ipv6_access_type": "ipv6_access_type_value",
                "ipv6_address": "ipv6_address_value",
                "kind": "kind_value",
                "name": "name_value",
                "network": "network_value",
                "network_i_p": "network_i_p_value",
                "nic_type": "nic_type_value",
                "queue_count": 1197,
                "stack_type": "stack_type_value",
                "subnetwork": "subnetwork_value",
            }
        ],
        "private_ipv6_google_access": "private_ipv6_google_access_value",
        "reservation_affinity": {
            "consume_reservation_type": "consume_reservation_type_value",
            "key": "key_value",
            "values": ["values_value_1", "values_value_2"],
        },
        "resource_policies": ["resource_policies_value_1", "resource_policies_value_2"],
        "satisfies_pzs": True,
        "scheduling": {
            "automatic_restart": True,
            "location_hint": "location_hint_value",
            "min_node_cpus": 1379,
            "node_affinities": [
                {
                    "key": "key_value",
                    "operator": "operator_value",
                    "values": ["values_value_1", "values_value_2"],
                }
            ],
            "on_host_maintenance": "on_host_maintenance_value",
            "preemptible": True,
        },
        "self_link": "self_link_value",
        "service_accounts": [
            {"email": "email_value", "scopes": ["scopes_value_1", "scopes_value_2"]}
        ],
        "shielded_instance_config": {
            "enable_integrity_monitoring": True,
            "enable_secure_boot": True,
            "enable_vtpm": True,
        },
        "shielded_instance_integrity_policy": {"update_auto_learn_policy": True},
        "start_restricted": True,
        "status": "status_value",
        "status_message": "status_message_value",
        "tags": {
            "fingerprint": "fingerprint_value",
            "items": ["items_value_1", "items_value_2"],
        },
        "zone": "zone_value",
    }
    request = request_type(request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.insert_unary(request)

    # Establish that the response is the type that we expect, with every
    # canned Operation field round-tripped through JSON intact.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_insert_unary_rest_required_fields(request_type=compute.InsertInstanceRequest):
    """Required-field handling for insert_unary.

    Exercises ``_get_unset_required_fields`` twice — once to confirm
    default-valued required fields are dropped from the JSON request, and
    once to confirm explicitly-set values survive — then issues the call
    with transcoding mocked out and checks no stray query params are sent.
    """
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).insert._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).insert._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    # "request_id" and "source_instance_template" are the optional query
    # parameters for this method.
    assert not set(unset_fields) - set(("request_id", "source_instance_template",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.insert_unary(request)

            # Empty required fields must not leak into the query string.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_insert_unary_rest_unset_required_fields():
    """Verify the unset-required-fields set for insert.

    The query-string required fields (``requestId``,
    ``sourceInstanceTemplate``) intersected with the required request
    members (``instanceResource``, ``project``, ``zone``) must be empty.
    """
    transport = transports.InstancesRestTransport(
        # Fix: instantiate the credentials; the original passed the
        # AnonymousCredentials *class*, inconsistent with every other
        # test in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.insert._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId", "sourceInstanceTemplate",))
        & set(("instanceResource", "project", "zone",))
    )
def test_insert_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.InsertInstanceRequest
):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest.

    Uses the same fully-populated Instance body as the happy-path test so
    transcoding succeeds and the error path is reached.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2"}
    request_init["instance_resource"] = {
        "advanced_machine_features": {
            "enable_nested_virtualization": True,
            "threads_per_core": 1689,
        },
        "can_ip_forward": True,
        "confidential_instance_config": {"enable_confidential_compute": True},
        "cpu_platform": "cpu_platform_value",
        "creation_timestamp": "creation_timestamp_value",
        "deletion_protection": True,
        "description": "description_value",
        "disks": [
            {
                "auto_delete": True,
                "boot": True,
                "device_name": "device_name_value",
                "disk_encryption_key": {
                    "kms_key_name": "kms_key_name_value",
                    "kms_key_service_account": "kms_key_service_account_value",
                    "raw_key": "raw_key_value",
                    "rsa_encrypted_key": "rsa_encrypted_key_value",
                    "sha256": "sha256_value",
                },
                "disk_size_gb": 1261,
                "guest_os_features": [{"type_": "type__value"}],
                "index": 536,
                "initialize_params": {
                    "description": "description_value",
                    "disk_name": "disk_name_value",
                    "disk_size_gb": 1261,
                    "disk_type": "disk_type_value",
                    "labels": {},
                    "on_update_action": "on_update_action_value",
                    "provisioned_iops": 1740,
                    "resource_policies": [
                        "resource_policies_value_1",
                        "resource_policies_value_2",
                    ],
                    "source_image": "source_image_value",
                    "source_image_encryption_key": {
                        "kms_key_name": "kms_key_name_value",
                        "kms_key_service_account": "kms_key_service_account_value",
                        "raw_key": "raw_key_value",
                        "rsa_encrypted_key": "rsa_encrypted_key_value",
                        "sha256": "sha256_value",
                    },
                    "source_snapshot": "source_snapshot_value",
                    "source_snapshot_encryption_key": {
                        "kms_key_name": "kms_key_name_value",
                        "kms_key_service_account": "kms_key_service_account_value",
                        "raw_key": "raw_key_value",
                        "rsa_encrypted_key": "rsa_encrypted_key_value",
                        "sha256": "sha256_value",
                    },
                },
                "interface": "interface_value",
                "kind": "kind_value",
                "licenses": ["licenses_value_1", "licenses_value_2"],
                "mode": "mode_value",
                "shielded_instance_initial_state": {
                    "dbs": [
                        {"content": "content_value", "file_type": "file_type_value"}
                    ],
                    "dbxs": [
                        {"content": "content_value", "file_type": "file_type_value"}
                    ],
                    "keks": [
                        {"content": "content_value", "file_type": "file_type_value"}
                    ],
                    "pk": {"content": "content_value", "file_type": "file_type_value"},
                },
                "source": "source_value",
                "type_": "type__value",
            }
        ],
        "display_device": {"enable_display": True},
        "fingerprint": "fingerprint_value",
        "guest_accelerators": [
            {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"}
        ],
        "hostname": "hostname_value",
        "id": 205,
        "kind": "kind_value",
        "label_fingerprint": "label_fingerprint_value",
        "labels": {},
        "last_start_timestamp": "last_start_timestamp_value",
        "last_stop_timestamp": "last_stop_timestamp_value",
        "last_suspended_timestamp": "last_suspended_timestamp_value",
        "machine_type": "machine_type_value",
        "metadata": {
            "fingerprint": "fingerprint_value",
            "items": [{"key": "key_value", "value": "value_value"}],
            "kind": "kind_value",
        },
        "min_cpu_platform": "min_cpu_platform_value",
        "name": "name_value",
        "network_interfaces": [
            {
                "access_configs": [
                    {
                        "external_ipv6": "external_ipv6_value",
                        "external_ipv6_prefix_length": 2837,
                        "kind": "kind_value",
                        "name": "name_value",
                        "nat_i_p": "nat_i_p_value",
                        "network_tier": "network_tier_value",
                        "public_ptr_domain_name": "public_ptr_domain_name_value",
                        "set_public_ptr": True,
                        "type_": "type__value",
                    }
                ],
                "alias_ip_ranges": [
                    {
                        "ip_cidr_range": "ip_cidr_range_value",
                        "subnetwork_range_name": "subnetwork_range_name_value",
                    }
                ],
                "fingerprint": "fingerprint_value",
                "ipv6_access_configs": [
                    {
                        "external_ipv6": "external_ipv6_value",
                        "external_ipv6_prefix_length": 2837,
                        "kind": "kind_value",
                        "name": "name_value",
                        "nat_i_p": "nat_i_p_value",
                        "network_tier": "network_tier_value",
                        "public_ptr_domain_name": "public_ptr_domain_name_value",
                        "set_public_ptr": True,
                        "type_": "type__value",
                    }
                ],
                "ipv6_access_type": "ipv6_access_type_value",
                "ipv6_address": "ipv6_address_value",
                "kind": "kind_value",
                "name": "name_value",
                "network": "network_value",
                "network_i_p": "network_i_p_value",
                "nic_type": "nic_type_value",
                "queue_count": 1197,
                "stack_type": "stack_type_value",
                "subnetwork": "subnetwork_value",
            }
        ],
        "private_ipv6_google_access": "private_ipv6_google_access_value",
        "reservation_affinity": {
            "consume_reservation_type": "consume_reservation_type_value",
            "key": "key_value",
            "values": ["values_value_1", "values_value_2"],
        },
        "resource_policies": ["resource_policies_value_1", "resource_policies_value_2"],
        "satisfies_pzs": True,
        "scheduling": {
            "automatic_restart": True,
            "location_hint": "location_hint_value",
            "min_node_cpus": 1379,
            "node_affinities": [
                {
                    "key": "key_value",
                    "operator": "operator_value",
                    "values": ["values_value_1", "values_value_2"],
                }
            ],
            "on_host_maintenance": "on_host_maintenance_value",
            "preemptible": True,
        },
        "self_link": "self_link_value",
        "service_accounts": [
            {"email": "email_value", "scopes": ["scopes_value_1", "scopes_value_2"]}
        ],
        "shielded_instance_config": {
            "enable_integrity_monitoring": True,
            "enable_secure_boot": True,
            "enable_vtpm": True,
        },
        "shielded_instance_integrity_policy": {"update_auto_learn_policy": True},
        "start_restricted": True,
        "status": "status_value",
        "status_message": "status_message_value",
        "tags": {
            "fingerprint": "fingerprint_value",
            "items": ["items_value_1", "items_value_2"],
        },
        "zone": "zone_value",
    }
    request = request_type(request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.insert_unary(request)
def test_insert_unary_rest_flattened():
    """Flattened kwargs for ``insert_unary`` must be routed to the instances URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Fake a successful Operation payload coming back over HTTP.
        canned = Response()
        canned.status_code = 200
        canned._content = compute.Operation.to_json(compute.Operation()).encode("UTF-8")
        req.return_value = canned
        # Truthy flattened fields, then overridden by path params that
        # satisfy the http rule for this method.
        call_kwargs = dict(
            project="project_value",
            zone="zone_value",
            instance_resource=compute.Instance(
                advanced_machine_features=compute.AdvancedMachineFeatures(
                    enable_nested_virtualization=True
                )
            ),
        )
        call_kwargs.update({"project": "sample1", "zone": "sample2"})
        client.insert_unary(**call_kwargs)
        # Exactly one underlying HTTP call, aimed at the expected resource path.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_url = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances"
            % client.transport._host
        )
        assert path_template.validate(expected_url, args[1])
def test_insert_unary_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request = compute.InsertInstanceRequest()
    resource = compute.Instance(
        advanced_machine_features=compute.AdvancedMachineFeatures(
            enable_nested_virtualization=True
        )
    )
    # Mixing a request object with flattened fields is a client-side error.
    with pytest.raises(ValueError):
        client.insert_unary(
            request,
            project="project_value",
            zone="zone_value",
            instance_resource=resource,
        )
def test_insert_unary_rest_error():
    """Smoke test: an InstancesClient can be built with the REST transport."""
    creds = ga_credentials.AnonymousCredentials()
    InstancesClient(credentials=creds, transport="rest")
@pytest.mark.parametrize("request_type", [compute.ListInstancesRequest, dict,])
def test_list_rest(request_type):
    """``list`` must parse a mocked REST response into a ListPager."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # A request that satisfies the URL transcoding for this method.
    request = request_type({"project": "sample1", "zone": "sample2"})
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Payload the fake HTTP layer hands back to the client.
        expected = compute.InstanceList(
            id="id_value",
            kind="kind_value",
            next_page_token="next_page_token_value",
            self_link="self_link_value",
        )
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.InstanceList.to_json(expected).encode("UTF-8")
        req.return_value = http_response
        response = client.list(request)
    # The client surfaces a pager exposing the raw response fields.
    assert isinstance(response, pagers.ListPager)
    assert response.id == "id_value"
    assert response.kind == "kind_value"
    assert response.next_page_token == "next_page_token_value"
    assert response.self_link == "self_link_value"
def test_list_rest_required_fields(request_type=compute.ListInstancesRequest):
    """Exercise required-field handling for ``list`` on the REST transport.

    Starts from a request whose required fields hold default (empty) values,
    checks ``_get_unset_required_fields`` drops/restores them as expected,
    then issues the call with ``transcode`` mocked out and verifies no query
    params are sent.
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).list._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).list._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(
        ("max_results", "filter", "order_by", "page_token", "return_partial_success",)
    )
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.InstanceList()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.InstanceList.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.list(request)
            # With all required fields empty, nothing should be sent as params.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_list_rest_unset_required_fields():
    """``list``'s unset-required-fields set is its query params ∩ required fields."""
    transport = transports.InstancesRestTransport(
        # Bug fix: the original passed the AnonymousCredentials *class* itself
        # (missing call parentheses); every other construction in this file
        # instantiates it.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.list._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("maxResults", "filter", "orderBy", "pageToken", "returnPartialSuccess",))
        & set(("project", "zone",))
    )
def test_list_rest_bad_request(
    transport: str = "rest", request_type=compute.ListInstancesRequest
):
    """A 400 from the HTTP session must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request that satisfies the URL transcoding for this method.
    request = request_type({"project": "sample1", "zone": "sample2"})
    # Fake a BadRequest error coming back from the HTTP session.
    with mock.patch.object(Session, "request") as req:
        failed = Response()
        failed.status_code = 400
        failed.request = Request()
        req.return_value = failed
        with pytest.raises(core_exceptions.BadRequest):
            client.list(request)
def test_list_rest_flattened():
    """Flattened kwargs for ``list`` must be routed to the instances URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        canned = Response()
        canned.status_code = 200
        canned._content = compute.InstanceList.to_json(compute.InstanceList()).encode(
            "UTF-8"
        )
        req.return_value = canned
        # Truthy flattened fields, then overridden by path params that
        # satisfy the http rule for this method.
        call_kwargs = dict(project="project_value", zone="zone_value",)
        call_kwargs.update({"project": "sample1", "zone": "sample2"})
        client.list(**call_kwargs)
        # Exactly one underlying HTTP call, aimed at the expected resource path.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_url = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances"
            % client.transport._host
        )
        assert path_template.validate(expected_url, args[1])
def test_list_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request = compute.ListInstancesRequest()
    # Mixing a request object with flattened fields is a client-side error.
    with pytest.raises(ValueError):
        client.list(request, project="project_value", zone="zone_value")
def test_list_rest_pager(transport: str = "rest"):
    """``list`` pagination: four pages are flattened into one iterable.

    Feeds four canned ``InstanceList`` pages (3 + 0 + 1 + 2 items) through
    the mocked session twice — once consumed item-by-item, once page-by-page —
    and checks item count and per-page ``next_page_token`` values.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        # with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.InstanceList(
                items=[compute.Instance(), compute.Instance(), compute.Instance(),],
                next_page_token="abc",
            ),
            compute.InstanceList(items=[], next_page_token="def",),
            compute.InstanceList(items=[compute.Instance(),], next_page_token="ghi",),
            compute.InstanceList(items=[compute.Instance(), compute.Instance(),],),
        )
        # Two responses for two calls
        response = response + response
        # Wrap the values into proper Response objs
        response = tuple(compute.InstanceList.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode("UTF-8")
            return_val.status_code = 200
        # side_effect makes the mock return one canned page per call, in order.
        req.side_effect = return_values
        sample_request = {"project": "sample1", "zone": "sample2"}
        pager = client.list(request=sample_request)
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, compute.Instance) for i in results)
        pages = list(client.list(request=sample_request).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize("request_type", [compute.ListReferrersInstancesRequest, dict,])
def test_list_referrers_rest(request_type):
    """``list_referrers`` must parse a mocked REST response into a pager.

    Parametrized over the proto request type and a plain dict, both of which
    the client accepts.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.InstanceListReferrers(
            id="id_value",
            kind="kind_value",
            next_page_token="next_page_token_value",
            self_link="self_link_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.InstanceListReferrers.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.list_referrers(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListReferrersPager)
    assert response.id == "id_value"
    assert response.kind == "kind_value"
    assert response.next_page_token == "next_page_token_value"
    assert response.self_link == "self_link_value"
def test_list_referrers_rest_required_fields(
    request_type=compute.ListReferrersInstancesRequest,
):
    """Exercise required-field handling for ``list_referrers`` on REST.

    Mirrors ``test_list_rest_required_fields`` but with the additional
    required ``instance`` path parameter.
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).list_referrers._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).list_referrers._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(
        ("max_results", "filter", "order_by", "page_token", "return_partial_success",)
    )
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.InstanceListReferrers()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "get",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.InstanceListReferrers.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.list_referrers(request)
            # With all required fields empty, nothing should be sent as params.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_list_referrers_rest_unset_required_fields():
    """``list_referrers`` unset-required-fields = query params ∩ required fields."""
    transport = transports.InstancesRestTransport(
        # Bug fix: the original passed the AnonymousCredentials *class* itself
        # (missing call parentheses); every other construction in this file
        # instantiates it.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.list_referrers._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("maxResults", "filter", "orderBy", "pageToken", "returnPartialSuccess",))
        & set(("instance", "project", "zone",))
    )
def test_list_referrers_rest_bad_request(
    transport: str = "rest", request_type=compute.ListReferrersInstancesRequest
):
    """A 400 from the HTTP session must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request that satisfies the URL transcoding for this method.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Fake a BadRequest error coming back from the HTTP session.
    with mock.patch.object(Session, "request") as req:
        failed = Response()
        failed.status_code = 400
        failed.request = Request()
        req.return_value = failed
        with pytest.raises(core_exceptions.BadRequest):
            client.list_referrers(request)
def test_list_referrers_rest_flattened():
    """Flattened kwargs for ``list_referrers`` must hit the referrers URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        canned = Response()
        canned.status_code = 200
        canned._content = compute.InstanceListReferrers.to_json(
            compute.InstanceListReferrers()
        ).encode("UTF-8")
        req.return_value = canned
        # Truthy flattened fields, then overridden by path params that
        # satisfy the http rule for this method.
        call_kwargs = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        call_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.list_referrers(**call_kwargs)
        # Exactly one underlying HTTP call, aimed at the expected resource path.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_url = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/referrers"
            % client.transport._host
        )
        assert path_template.validate(expected_url, args[1])
def test_list_referrers_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    request = compute.ListReferrersInstancesRequest()
    # Mixing a request object with flattened fields is a client-side error.
    with pytest.raises(ValueError):
        client.list_referrers(
            request,
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_list_referrers_rest_pager(transport: str = "rest"):
    """``list_referrers`` pagination: four pages flattened into one iterable.

    Feeds four canned ``InstanceListReferrers`` pages (3 + 0 + 1 + 2 items)
    through the mocked session twice — once consumed item-by-item, once
    page-by-page — and checks item count and per-page ``next_page_token``.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        # with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            compute.InstanceListReferrers(
                items=[compute.Reference(), compute.Reference(), compute.Reference(),],
                next_page_token="abc",
            ),
            compute.InstanceListReferrers(items=[], next_page_token="def",),
            compute.InstanceListReferrers(
                items=[compute.Reference(),], next_page_token="ghi",
            ),
            compute.InstanceListReferrers(
                items=[compute.Reference(), compute.Reference(),],
            ),
        )
        # Two responses for two calls
        response = response + response
        # Wrap the values into proper Response objs
        response = tuple(compute.InstanceListReferrers.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode("UTF-8")
            return_val.status_code = 200
        # side_effect makes the mock return one canned page per call, in order.
        req.side_effect = return_values
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        pager = client.list_referrers(request=sample_request)
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, compute.Reference) for i in results)
        pages = list(client.list_referrers(request=sample_request).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
@pytest.mark.parametrize(
    "request_type", [compute.RemoveResourcePoliciesInstanceRequest, dict,]
)
def test_remove_resource_policies_unary_rest(request_type):
    """``remove_resource_policies_unary`` parses a mocked Operation response.

    Sends a transcodable request with a body resource and checks every field
    of the returned ``compute.Operation`` against the canned payload.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["instances_remove_resource_policies_request_resource"] = {
        "resource_policies": ["resource_policies_value_1", "resource_policies_value_2"]
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.remove_resource_policies_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_remove_resource_policies_unary_rest_required_fields(
    request_type=compute.RemoveResourcePoliciesInstanceRequest,
):
    """Exercise required-field handling for ``remove_resource_policies``.

    Same shape as the other ``*_required_fields`` tests: check
    ``_get_unset_required_fields`` behavior, then call the method with
    ``transcode`` mocked (POST with an empty body) and verify no query
    params are sent.
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).remove_resource_policies._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).remove_resource_policies._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.remove_resource_policies_unary(request)
            # With all required fields empty, nothing should be sent as params.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_remove_resource_policies_unary_rest_unset_required_fields():
    """``remove_resource_policies`` unset fields = query params ∩ required fields."""
    transport = transports.InstancesRestTransport(
        # Bug fix: the original passed the AnonymousCredentials *class* itself
        # (missing call parentheses); every other construction in this file
        # instantiates it.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.remove_resource_policies._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "instance",
                "instancesRemoveResourcePoliciesRequestResource",
                "project",
                "zone",
            )
        )
    )
def test_remove_resource_policies_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.RemoveResourcePoliciesInstanceRequest
):
    """A 400 from the HTTP session must surface as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request (path params + body resource) that satisfies transcoding.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["instances_remove_resource_policies_request_resource"] = {
        "resource_policies": ["resource_policies_value_1", "resource_policies_value_2"]
    }
    request = request_type(request_init)
    # Fake a BadRequest error coming back from the HTTP session.
    with mock.patch.object(Session, "request") as req:
        failed = Response()
        failed.status_code = 400
        failed.request = Request()
        req.return_value = failed
        with pytest.raises(core_exceptions.BadRequest):
            client.remove_resource_policies_unary(request)
def test_remove_resource_policies_unary_rest_flattened():
    """Flattened kwargs must be routed to the removeResourcePolicies URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        canned = Response()
        canned.status_code = 200
        canned._content = compute.Operation.to_json(compute.Operation()).encode("UTF-8")
        req.return_value = canned
        # Truthy flattened fields, then overridden by path params that
        # satisfy the http rule for this method.
        call_kwargs = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_remove_resource_policies_request_resource=compute.InstancesRemoveResourcePoliciesRequest(
                resource_policies=["resource_policies_value"]
            ),
        )
        call_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.remove_resource_policies_unary(**call_kwargs)
        # Exactly one underlying HTTP call, aimed at the expected resource path.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_url = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/removeResourcePolicies"
            % client.transport._host
        )
        assert path_template.validate(expected_url, args[1])
def test_remove_resource_policies_unary_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    body = compute.InstancesRemoveResourcePoliciesRequest(
        resource_policies=["resource_policies_value"]
    )
    # Mixing a request object with flattened fields is a client-side error.
    with pytest.raises(ValueError):
        client.remove_resource_policies_unary(
            compute.RemoveResourcePoliciesInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_remove_resource_policies_request_resource=body,
        )
def test_remove_resource_policies_unary_rest_error():
    """Smoke test: an InstancesClient can be built with the REST transport."""
    creds = ga_credentials.AnonymousCredentials()
    InstancesClient(credentials=creds, transport="rest")
@pytest.mark.parametrize("request_type", [compute.ResetInstanceRequest, dict,])
def test_reset_unary_rest(request_type):
    """``reset_unary`` parses a mocked Operation response.

    Sends a transcodable request and checks every field of the returned
    ``compute.Operation`` against the canned payload.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.reset_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_reset_unary_rest_required_fields(request_type=compute.ResetInstanceRequest):
    """Check required-field handling for InstancesClient.reset_unary over REST."""
    transport_class = transports.InstancesRestTransport

    # All required fields start at their (empty) default values so the JSON
    # serialization below drops them.
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).reset._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).reset._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.reset_unary(request)

            # With transcode mocked to an empty query_params dict, the call
            # should send no query parameters.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_reset_unary_rest_unset_required_fields():
    """Only optional query params may remain unset for reset; none are required."""
    transport = transports.InstancesRestTransport(
        # Fix: pass an instantiated credentials object; the class itself was
        # being passed before, unlike every other transport construction in
        # this module.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.reset._get_unset_required_fields({})
    # Intersect the optional query params with the required fields: nothing
    # overlaps, so the result should be empty.
    assert set(unset_fields) == (
        set(("requestId",)) & set(("instance", "project", "zone",))
    )
def test_reset_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.ResetInstanceRequest
):
    """A 400 reply from the server surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request that transcodes onto the reset http rule.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Fake a 400 response from the session and expect BadRequest to bubble up.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        error_response = Response()
        error_response.status_code = 400
        error_response.request = Request()
        req.return_value = error_response
        client.reset_unary(request)
def test_reset_unary_rest_flattened():
    """reset_unary accepts flattened args and hits the documented URL path."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }

        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.reset_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/reset"
            % client.transport._host,
            args[1],
        )
def test_reset_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Supplying both forms at once is ambiguous and rejected by the client.
    with pytest.raises(ValueError):
        client.reset_unary(
            compute.ResetInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_reset_unary_rest_error():
    """Constructing a REST-transport client must not raise."""
    InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.SendDiagnosticInterruptInstanceRequest, dict,]
)
def test_send_diagnostic_interrupt_rest(request_type):
    """send_diagnostic_interrupt round-trips a mocked REST response."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.SendDiagnosticInterruptInstanceResponse()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.SendDiagnosticInterruptInstanceResponse.to_json(
            return_value
        )
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.send_diagnostic_interrupt(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.SendDiagnosticInterruptInstanceResponse)
def test_send_diagnostic_interrupt_rest_required_fields(
    request_type=compute.SendDiagnosticInterruptInstanceRequest,
):
    """Check required-field handling for send_diagnostic_interrupt over REST."""
    transport_class = transports.InstancesRestTransport

    # All required fields start at their (empty) default values so the JSON
    # serialization below drops them.
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).send_diagnostic_interrupt._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).send_diagnostic_interrupt._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.SendDiagnosticInterruptInstanceResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.SendDiagnosticInterruptInstanceResponse.to_json(
                return_value
            )
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.send_diagnostic_interrupt(request)

            # With transcode mocked to an empty query_params dict, the call
            # should send no query parameters.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_send_diagnostic_interrupt_rest_unset_required_fields():
    """No optional query params exist for this call; nothing may remain unset."""
    transport = transports.InstancesRestTransport(
        # Fix: pass an instantiated credentials object; the class itself was
        # being passed before, unlike every other transport construction in
        # this module.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.send_diagnostic_interrupt._get_unset_required_fields({})
    # Intersecting the (empty) optional params with the required fields must
    # yield the empty set.
    assert set(unset_fields) == (set(()) & set(("instance", "project", "zone",)))
def test_send_diagnostic_interrupt_rest_bad_request(
    transport: str = "rest", request_type=compute.SendDiagnosticInterruptInstanceRequest
):
    """A 400 reply makes send_diagnostic_interrupt raise BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request that transcodes onto the sendDiagnosticInterrupt rule.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Fake a 400 response from the session and expect BadRequest to bubble up.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        error_response = Response()
        error_response.status_code = 400
        error_response.request = Request()
        req.return_value = error_response
        client.send_diagnostic_interrupt(request)
def test_send_diagnostic_interrupt_rest_flattened():
    """send_diagnostic_interrupt accepts flattened args and hits the documented URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.SendDiagnosticInterruptInstanceResponse()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.SendDiagnosticInterruptInstanceResponse.to_json(
            return_value
        )
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }

        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        mock_args.update(sample_request)
        client.send_diagnostic_interrupt(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/sendDiagnosticInterrupt"
            % client.transport._host,
            args[1],
        )
def test_send_diagnostic_interrupt_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Supplying both forms at once is ambiguous and rejected by the client.
    with pytest.raises(ValueError):
        client.send_diagnostic_interrupt(
            compute.SendDiagnosticInterruptInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_send_diagnostic_interrupt_rest_error():
    """Constructing a REST-transport client must not raise."""
    InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.SetDeletionProtectionInstanceRequest, dict,]
)
def test_set_deletion_protection_unary_rest(request_type):
    """set_deletion_protection_unary returns the mocked Operation unchanged."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    request = request_type(request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.  Every
        # scalar field is populated so the JSON round-trip below is exercised
        # on all of them.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.set_deletion_protection_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    # Every field set on the fake Operation should survive the round trip.
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_set_deletion_protection_unary_rest_required_fields(
    request_type=compute.SetDeletionProtectionInstanceRequest,
):
    """Check required-field handling for set_deletion_protection_unary over REST."""
    transport_class = transports.InstancesRestTransport

    # All required fields start at their (empty) default values so the JSON
    # serialization below drops them.
    request_init = {}
    request_init["project"] = ""
    request_init["resource"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_deletion_protection._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    jsonified_request["resource"] = "resource_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_deletion_protection._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("deletion_protection", "request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "resource" in jsonified_request
    assert jsonified_request["resource"] == "resource_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.set_deletion_protection_unary(request)

            # With transcode mocked to an empty query_params dict, the call
            # should send no query parameters.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_deletion_protection_unary_rest_unset_required_fields():
    """Only optional query params may remain unset; none overlap required fields."""
    transport = transports.InstancesRestTransport(
        # Fix: pass an instantiated credentials object; the class itself was
        # being passed before, unlike every other transport construction in
        # this module.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.set_deletion_protection._get_unset_required_fields({})
    # Intersect the optional query params with the required fields: nothing
    # overlaps, so the result should be empty.
    assert set(unset_fields) == (
        set(("deletionProtection", "requestId",))
        & set(("project", "resource", "zone",))
    )
def test_set_deletion_protection_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.SetDeletionProtectionInstanceRequest
):
    """A 400 reply makes set_deletion_protection_unary raise BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request that transcodes onto the setDeletionProtection rule.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    )

    # Fake a 400 response from the session and expect BadRequest to bubble up.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        error_response = Response()
        error_response.status_code = 400
        error_response.request = Request()
        req.return_value = error_response
        client.set_deletion_protection_unary(request)
def test_set_deletion_protection_unary_rest_flattened():
    """set_deletion_protection_unary accepts flattened args and hits the URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "resource": "sample3",
        }

        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value", zone="zone_value", resource="resource_value",
        )
        mock_args.update(sample_request)
        client.set_deletion_protection_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setDeletionProtection"
            % client.transport._host,
            args[1],
        )
def test_set_deletion_protection_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Supplying both forms at once is ambiguous and rejected by the client.
    with pytest.raises(ValueError):
        client.set_deletion_protection_unary(
            compute.SetDeletionProtectionInstanceRequest(),
            project="project_value",
            zone="zone_value",
            resource="resource_value",
        )
def test_set_deletion_protection_unary_rest_error():
    """Constructing a REST-transport client must not raise."""
    InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.SetDiskAutoDeleteInstanceRequest, dict,]
)
def test_set_disk_auto_delete_unary_rest(request_type):
    """set_disk_auto_delete_unary returns the mocked Operation unchanged."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.  Every
        # scalar field is populated so the JSON round-trip below is exercised
        # on all of them.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.set_disk_auto_delete_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    # Every field set on the fake Operation should survive the round trip.
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_set_disk_auto_delete_unary_rest_required_fields(
    request_type=compute.SetDiskAutoDeleteInstanceRequest,
):
    """Check required-field handling for set_disk_auto_delete_unary over REST.

    Unlike the path-only methods, this request has two required query
    parameters (auto_delete, device_name) whose defaults must still be sent.
    """
    transport_class = transports.InstancesRestTransport

    # All required fields start at their default values so the JSON
    # serialization below drops them.
    request_init = {}
    request_init["auto_delete"] = False
    request_init["device_name"] = ""
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    assert "autoDelete" not in jsonified_request
    assert "deviceName" not in jsonified_request

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_disk_auto_delete._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    assert "autoDelete" in jsonified_request
    assert jsonified_request["autoDelete"] == request_init["auto_delete"]
    assert "deviceName" in jsonified_request
    assert jsonified_request["deviceName"] == request_init["device_name"]

    jsonified_request["autoDelete"] = True
    jsonified_request["deviceName"] = "device_name_value"
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_disk_auto_delete._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("device_name", "request_id", "auto_delete",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "autoDelete" in jsonified_request
    assert jsonified_request["autoDelete"] == True
    assert "deviceName" in jsonified_request
    assert jsonified_request["deviceName"] == "device_name_value"
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.set_disk_auto_delete_unary(request)

            # Required query params must be sent even at their default values.
            expected_params = [
                ("autoDelete", False,),
                ("deviceName", "",),
            ]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_disk_auto_delete_unary_rest_unset_required_fields():
    """autoDelete and deviceName are both query params AND required fields."""
    transport = transports.InstancesRestTransport(
        # Fix: pass an instantiated credentials object; the class itself was
        # being passed before, unlike every other transport construction in
        # this module.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.set_disk_auto_delete._get_unset_required_fields({})
    # The intersection of query params and required fields is non-empty here:
    # autoDelete and deviceName appear in both sets.
    assert set(unset_fields) == (
        set(("deviceName", "requestId", "autoDelete",))
        & set(("autoDelete", "deviceName", "instance", "project", "zone",))
    )
def test_set_disk_auto_delete_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.SetDiskAutoDeleteInstanceRequest
):
    """A 400 reply makes set_disk_auto_delete_unary raise BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a request that transcodes onto the setDiskAutoDelete rule.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )

    # Fake a 400 response from the session and expect BadRequest to bubble up.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        error_response = Response()
        error_response.status_code = 400
        error_response.request = Request()
        req.return_value = error_response
        client.set_disk_auto_delete_unary(request)
def test_set_disk_auto_delete_unary_rest_flattened():
    """set_disk_auto_delete_unary accepts flattened args and hits the URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }

        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            auto_delete=True,
            device_name="device_name_value",
        )
        mock_args.update(sample_request)
        client.set_disk_auto_delete_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setDiskAutoDelete"
            % client.transport._host,
            args[1],
        )
def test_set_disk_auto_delete_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Supplying both forms at once is ambiguous and rejected by the client.
    with pytest.raises(ValueError):
        client.set_disk_auto_delete_unary(
            compute.SetDiskAutoDeleteInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            auto_delete=True,
            device_name="device_name_value",
        )
def test_set_disk_auto_delete_unary_rest_error():
    """Constructing a REST-transport client must not raise."""
    InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize("request_type", [compute.SetIamPolicyInstanceRequest, dict,])
def test_set_iam_policy_rest(request_type):
    """set_iam_policy sends a full ZoneSetPolicyRequest body and parses a Policy."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    # Fully populated body resource: exercises bindings, a complete audit /
    # rules policy, and all nested log-config variants in one request.
    request_init["zone_set_policy_request_resource"] = {
        "bindings": [
            {
                "binding_id": "binding_id_value",
                "condition": {
                    "description": "description_value",
                    "expression": "expression_value",
                    "location": "location_value",
                    "title": "title_value",
                },
                "members": ["members_value_1", "members_value_2"],
                "role": "role_value",
            }
        ],
        "etag": "etag_value",
        "policy": {
            "audit_configs": [
                {
                    "audit_log_configs": [
                        {
                            "exempted_members": [
                                "exempted_members_value_1",
                                "exempted_members_value_2",
                            ],
                            "ignore_child_exemptions": True,
                            "log_type": "log_type_value",
                        }
                    ],
                    "exempted_members": [
                        "exempted_members_value_1",
                        "exempted_members_value_2",
                    ],
                    "service": "service_value",
                }
            ],
            "bindings": [
                {
                    "binding_id": "binding_id_value",
                    "condition": {
                        "description": "description_value",
                        "expression": "expression_value",
                        "location": "location_value",
                        "title": "title_value",
                    },
                    "members": ["members_value_1", "members_value_2"],
                    "role": "role_value",
                }
            ],
            "etag": "etag_value",
            "iam_owned": True,
            "rules": [
                {
                    "action": "action_value",
                    "conditions": [
                        {
                            "iam": "iam_value",
                            "op": "op_value",
                            "svc": "svc_value",
                            "sys": "sys_value",
                            "values": ["values_value_1", "values_value_2"],
                        }
                    ],
                    "description": "description_value",
                    "ins": ["ins_value_1", "ins_value_2"],
                    "log_configs": [
                        {
                            "cloud_audit": {
                                "authorization_logging_options": {
                                    "permission_type": "permission_type_value"
                                },
                                "log_name": "log_name_value",
                            },
                            "counter": {
                                "custom_fields": [
                                    {"name": "name_value", "value": "value_value"}
                                ],
                                "field": "field_value",
                                "metric": "metric_value",
                            },
                            "data_access": {"log_mode": "log_mode_value"},
                        }
                    ],
                    "not_ins": ["not_ins_value_1", "not_ins_value_2"],
                    "permissions": ["permissions_value_1", "permissions_value_2"],
                }
            ],
            "version": 774,
        },
    }
    request = request_type(request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Policy.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.set_iam_policy(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Policy)
    assert response.etag == "etag_value"
    assert response.iam_owned is True
    assert response.version == 774
def test_set_iam_policy_rest_required_fields(
    request_type=compute.SetIamPolicyInstanceRequest,
):
    """Check required-field handling for set_iam_policy over REST.

    Round-trips a request whose required fields hold default values through
    ``to_json``, verifies the transport drops defaulted required fields and
    leaves explicitly-set ones alone, then issues a mocked call and confirms
    no unexpected query parameters are sent.
    """
    transport_class = transports.InstancesRestTransport

    # Start with every required field present but set to its default ("").
    request_init = {}
    request_init["project"] = ""
    request_init["resource"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_iam_policy._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    jsonified_request["resource"] = "resource_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_iam_policy._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "resource" in jsonified_request
    assert jsonified_request["resource"] == "resource_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Policy()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Policy.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.set_iam_policy(request)

            # This method sends no extra query parameters.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_iam_policy_rest_unset_required_fields():
    """Verify the unset-required-fields set reported for set_iam_policy.

    Fix: instantiate ``AnonymousCredentials`` — the original passed the
    class object itself (missing ``()``), unlike every other transport
    construction in this file.
    """
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.set_iam_policy._get_unset_required_fields({})
    # set_iam_policy has no optional query params, hence the empty left set.
    assert set(unset_fields) == (
        set(()) & set(("project", "resource", "zone", "zoneSetPolicyRequestResource",))
    )
def test_set_iam_policy_rest_bad_request(
    transport: str = "rest", request_type=compute.SetIamPolicyInstanceRequest
):
    """An HTTP 400 during set_iam_policy surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    # Fully-populated body resource; the values are arbitrary placeholders
    # that exercise every field of ZoneSetPolicyRequest.
    request_init["zone_set_policy_request_resource"] = {
        "bindings": [
            {
                "binding_id": "binding_id_value",
                "condition": {
                    "description": "description_value",
                    "expression": "expression_value",
                    "location": "location_value",
                    "title": "title_value",
                },
                "members": ["members_value_1", "members_value_2"],
                "role": "role_value",
            }
        ],
        "etag": "etag_value",
        "policy": {
            "audit_configs": [
                {
                    "audit_log_configs": [
                        {
                            "exempted_members": [
                                "exempted_members_value_1",
                                "exempted_members_value_2",
                            ],
                            "ignore_child_exemptions": True,
                            "log_type": "log_type_value",
                        }
                    ],
                    "exempted_members": [
                        "exempted_members_value_1",
                        "exempted_members_value_2",
                    ],
                    "service": "service_value",
                }
            ],
            "bindings": [
                {
                    "binding_id": "binding_id_value",
                    "condition": {
                        "description": "description_value",
                        "expression": "expression_value",
                        "location": "location_value",
                        "title": "title_value",
                    },
                    "members": ["members_value_1", "members_value_2"],
                    "role": "role_value",
                }
            ],
            "etag": "etag_value",
            "iam_owned": True,
            "rules": [
                {
                    "action": "action_value",
                    "conditions": [
                        {
                            "iam": "iam_value",
                            "op": "op_value",
                            "svc": "svc_value",
                            "sys": "sys_value",
                            "values": ["values_value_1", "values_value_2"],
                        }
                    ],
                    "description": "description_value",
                    "ins": ["ins_value_1", "ins_value_2"],
                    "log_configs": [
                        {
                            "cloud_audit": {
                                "authorization_logging_options": {
                                    "permission_type": "permission_type_value"
                                },
                                "log_name": "log_name_value",
                            },
                            "counter": {
                                "custom_fields": [
                                    {"name": "name_value", "value": "value_value"}
                                ],
                                "field": "field_value",
                                "metric": "metric_value",
                            },
                            "data_access": {"log_mode": "log_mode_value"},
                        }
                    ],
                    "not_ins": ["not_ins_value_1", "not_ins_value_2"],
                    "permissions": ["permissions_value_1", "permissions_value_2"],
                }
            ],
            "version": 774,
        },
    }
    request = request_type(request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.set_iam_policy(request)
def test_set_iam_policy_rest_flattened():
    """A flattened-argument call to set_iam_policy hits the expected REST URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Policy()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Policy.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "resource": "sample3",
        }

        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            resource="resource_value",
            zone_set_policy_request_resource=compute.ZoneSetPolicyRequest(
                bindings=[compute.Binding(binding_id="binding_id_value")]
            ),
        )
        # Path params from sample_request override the truthy placeholders.
        mock_args.update(sample_request)
        client.set_iam_policy(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setIamPolicy"
            % client.transport._host,
            args[1],
        )
def test_set_iam_policy_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    rest_client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    flattened_kwargs = dict(
        project="project_value",
        zone="zone_value",
        resource="resource_value",
        zone_set_policy_request_resource=compute.ZoneSetPolicyRequest(
            bindings=[compute.Binding(binding_id="binding_id_value")]
        ),
    )
    # Mixing a populated request object with flattened arguments is invalid.
    with pytest.raises(ValueError):
        rest_client.set_iam_policy(
            compute.SetIamPolicyInstanceRequest(), **flattened_kwargs
        )
def test_set_iam_policy_rest_error():
    """Smoke-test that a REST client can be constructed for set_iam_policy."""
    _ = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize("request_type", [compute.SetLabelsInstanceRequest, dict,])
def test_set_labels_unary_rest(request_type):
    """Happy path for set_labels_unary over REST.

    Builds a transcodable request, fakes a 200 response carrying a
    fully-populated compute.Operation, and checks every field of the
    deserialized response.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["instances_set_labels_request_resource"] = {
        "label_fingerprint": "label_fingerprint_value",
        "labels": {},
    }
    request = request_type(request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.set_labels_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_set_labels_unary_rest_required_fields(
    request_type=compute.SetLabelsInstanceRequest,
):
    """Check required-field handling for set_labels over REST.

    Same flow as the other *_required_fields tests: round-trip defaults
    through ``to_json``, confirm the transport's unset-field bookkeeping,
    then make a mocked call and confirm no extra query params are sent.
    """
    transport_class = transports.InstancesRestTransport

    # Start with every required field present but set to its default ("").
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_labels._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_labels._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.set_labels_unary(request)

            # This method sends no extra query parameters.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_labels_unary_rest_unset_required_fields():
    """Verify the unset-required-fields set reported for set_labels.

    Fix: instantiate ``AnonymousCredentials`` — the original passed the
    class object itself (missing ``()``), unlike every other transport
    construction in this file.
    """
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.set_labels._get_unset_required_fields({})
    # requestId is the method's optional query param; the required fields
    # are listed on the right-hand side of the intersection.
    assert set(unset_fields) == (
        set(("requestId",))
        & set(("instance", "instancesSetLabelsRequestResource", "project", "zone",))
    )
def test_set_labels_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.SetLabelsInstanceRequest
):
    """An HTTP 400 during set_labels_unary surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["instances_set_labels_request_resource"] = {
        "label_fingerprint": "label_fingerprint_value",
        "labels": {},
    }
    request = request_type(request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.set_labels_unary(request)
def test_set_labels_unary_rest_flattened():
    """set_labels_unary accepts flattened args and hits the expected URL."""
    rest_client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Fake out the underlying HTTP session with a canned 200 response.
    with mock.patch.object(type(rest_client.transport._session), "request") as req:
        canned = Response()
        canned.status_code = 200
        canned._content = compute.Operation.to_json(compute.Operation()).encode(
            "UTF-8"
        )
        req.return_value = canned

        # Truthy flattened fields; the sample path params override them so
        # the call satisfies the http rule.
        call_kwargs = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_set_labels_request_resource=compute.InstancesSetLabelsRequest(
                label_fingerprint="label_fingerprint_value"
            ),
        )
        call_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )

        rest_client.set_labels_unary(**call_kwargs)

        # Exactly one HTTP call, aimed at the setLabels URL for the sample path.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setLabels"
            % rest_client.transport._host,
            args[1],
        )
def test_set_labels_unary_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    rest_client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    flattened_kwargs = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        instances_set_labels_request_resource=compute.InstancesSetLabelsRequest(
            label_fingerprint="label_fingerprint_value"
        ),
    )
    # Mixing a populated request object with flattened arguments is invalid.
    with pytest.raises(ValueError):
        rest_client.set_labels_unary(
            compute.SetLabelsInstanceRequest(), **flattened_kwargs
        )
def test_set_labels_unary_rest_error():
    """Smoke-test that a REST client can be constructed for set_labels."""
    _ = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize(
    "request_type", [compute.SetMachineResourcesInstanceRequest, dict,]
)
def test_set_machine_resources_unary_rest(request_type):
    """Happy path for set_machine_resources_unary over REST.

    Builds a transcodable request, fakes a 200 response carrying a
    fully-populated compute.Operation, and checks every field of the
    deserialized response.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["instances_set_machine_resources_request_resource"] = {
        "guest_accelerators": [
            {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"}
        ]
    }
    request = request_type(request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.set_machine_resources_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_set_machine_resources_unary_rest_required_fields(
    request_type=compute.SetMachineResourcesInstanceRequest,
):
    """Check required-field handling for set_machine_resources over REST.

    Same flow as the other *_required_fields tests: round-trip defaults
    through ``to_json``, confirm the transport's unset-field bookkeeping,
    then make a mocked call and confirm no extra query params are sent.
    """
    transport_class = transports.InstancesRestTransport

    # Start with every required field present but set to its default ("").
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_machine_resources._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_machine_resources._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.set_machine_resources_unary(request)

            # This method sends no extra query parameters.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_machine_resources_unary_rest_unset_required_fields():
    """Verify the unset-required-fields set reported for set_machine_resources.

    Fix: instantiate ``AnonymousCredentials`` — the original passed the
    class object itself (missing ``()``), unlike every other transport
    construction in this file.
    """
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.set_machine_resources._get_unset_required_fields({})
    # requestId is the method's optional query param; the required fields
    # are listed on the right-hand side of the intersection.
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "instance",
                "instancesSetMachineResourcesRequestResource",
                "project",
                "zone",
            )
        )
    )
def test_set_machine_resources_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.SetMachineResourcesInstanceRequest
):
    """A 400 from the server surfaces as core_exceptions.BadRequest."""
    rest_client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Build a single request mapping that satisfies transcoding.
    init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "instances_set_machine_resources_request_resource": {
            "guest_accelerators": [
                {
                    "accelerator_count": 1805,
                    "accelerator_type": "accelerator_type_value",
                }
            ]
        },
    }
    request = request_type(init)

    # Fake a 400 response and expect the client to raise.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        rest_client.set_machine_resources_unary(request)
def test_set_machine_resources_unary_rest_flattened():
    """A flattened-argument call to set_machine_resources hits the expected URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value

        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }

        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_set_machine_resources_request_resource=compute.InstancesSetMachineResourcesRequest(
                guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]
            ),
        )
        # Path params from sample_request override the truthy placeholders.
        mock_args.update(sample_request)
        client.set_machine_resources_unary(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineResources"
            % client.transport._host,
            args[1],
        )
def test_set_machine_resources_unary_rest_flattened_error(transport: str = "rest"):
    """Passing a request object together with flattened fields must raise."""
    rest_client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    flattened_kwargs = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        instances_set_machine_resources_request_resource=compute.InstancesSetMachineResourcesRequest(
            guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)]
        ),
    )
    # Mixing a populated request object with flattened arguments is invalid.
    with pytest.raises(ValueError):
        rest_client.set_machine_resources_unary(
            compute.SetMachineResourcesInstanceRequest(), **flattened_kwargs
        )
def test_set_machine_resources_unary_rest_error():
    """Smoke-test that a REST client can be constructed for set_machine_resources."""
    _ = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize("request_type", [compute.SetMachineTypeInstanceRequest, dict,])
def test_set_machine_type_unary_rest(request_type):
    """Happy path for set_machine_type_unary over REST.

    Builds a transcodable request, fakes a 200 response carrying a
    fully-populated compute.Operation, and checks every field of the
    deserialized response.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["instances_set_machine_type_request_resource"] = {
        "machine_type": "machine_type_value"
    }
    request = request_type(request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.set_machine_type_unary(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_set_machine_type_unary_rest_required_fields(
    request_type=compute.SetMachineTypeInstanceRequest,
):
    """Check required-field handling for set_machine_type over REST.

    Same flow as the other *_required_fields tests: round-trip defaults
    through ``to_json``, confirm the transport's unset-field bookkeeping,
    then make a mocked call and confirm no extra query params are sent.
    """
    transport_class = transports.InstancesRestTransport

    # Start with every required field present but set to its default ("").
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_machine_type._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_machine_type._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.set_machine_type_unary(request)

            # This method sends no extra query parameters.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_machine_type_unary_rest_unset_required_fields():
    """Verify the unset-required-fields set reported for set_machine_type.

    Fix: instantiate ``AnonymousCredentials`` — the original passed the
    class object itself (missing ``()``), unlike every other transport
    construction in this file.
    """
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.set_machine_type._get_unset_required_fields({})
    # requestId is the method's optional query param; the required fields
    # are listed on the right-hand side of the intersection.
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            ("instance", "instancesSetMachineTypeRequestResource", "project", "zone",)
        )
    )
def test_set_machine_type_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.SetMachineTypeInstanceRequest
):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials(),
    )
    # Build a request that transcodes cleanly onto the http rule.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "instances_set_machine_type_request_resource": {
                "machine_type": "machine_type_value"
            },
        }
    )
    # Fake a 400 reply from the underlying session and expect BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.set_machine_type_unary(request)
def test_set_machine_type_unary_rest_flattened():
    """Flattened-argument calls are routed to the setMachineType URI."""
    client = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the underlying HTTP session and hand back a canned 200 reply.
    with mock.patch.object(type(client.transport._session), "request") as req:
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = http_response
        # Truthy flattened fields, then overlay path params that satisfy
        # the http rule for this method.
        flattened_kwargs = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
            "instances_set_machine_type_request_resource": compute.InstancesSetMachineTypeRequest(
                machine_type="machine_type_value"
            ),
        }
        flattened_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.set_machine_type_unary(**flattened_kwargs)
        # Exactly one HTTP call whose URI matches the expected template.
        assert len(req.mock_calls) == 1
        _, call_args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineType"
            % client.transport._host,
            call_args[1],
        )
def test_set_machine_type_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields raises ValueError."""
    client = InstancesClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        # Supplying both a request object and flattened kwargs is ambiguous,
        # so the client must reject the call.
        client.set_machine_type_unary(
            compute.SetMachineTypeInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_set_machine_type_request_resource=compute.InstancesSetMachineTypeRequest(
                machine_type="machine_type_value"
            ),
        )
def test_set_machine_type_unary_rest_error():
    """Smoke test: a REST-transport client can be constructed."""
    client = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize("request_type", [compute.SetMetadataInstanceRequest, dict,])
def test_set_metadata_unary_rest(request_type):
    """Happy path: set_metadata_unary parses the REST reply into an Operation."""
    client = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials(),
    )
    # A request that transcodes cleanly onto the http rule for this method.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "metadata_resource": {
                "fingerprint": "fingerprint_value",
                "items": [{"key": "key_value", "value": "value_value"}],
                "kind": "kind_value",
            },
        }
    )
    # Every scalar field the mocked server reply should round-trip.
    expected_fields = {
        "client_operation_id": "client_operation_id_value",
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "end_time": "end_time_value",
        "http_error_message": "http_error_message_value",
        "http_error_status_code": 2374,
        "id": 205,
        "insert_time": "insert_time_value",
        "kind": "kind_value",
        "name": "name_value",
        "operation_group_id": "operation_group_id_value",
        "operation_type": "operation_type_value",
        "progress": 885,
        "region": "region_value",
        "self_link": "self_link_value",
        "start_time": "start_time_value",
        "status": compute.Operation.Status.DONE,
        "status_message": "status_message_value",
        "target_id": 947,
        "target_link": "target_link_value",
        "user": "user_value",
        "zone": "zone_value",
    }
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Operation.to_json(
            compute.Operation(**expected_fields)
        ).encode("UTF-8")
        req.return_value = http_response
        response = client.set_metadata_unary(request)
    # The payload must deserialize into an Operation with the same values.
    assert isinstance(response, compute.Operation)
    for field, want in expected_fields.items():
        assert getattr(response, field) == want
def test_set_metadata_unary_rest_required_fields(
    request_type=compute.SetMetadataInstanceRequest,
):
    """Exercise required-field handling for set_metadata.

    Verifies that default-valued required fields are dropped during
    serialization, that explicitly set values survive
    ``_get_unset_required_fields``, and that no stray query params reach
    the transport call.
    """
    transport_class = transports.InstancesRestTransport
    # Start with every required field present but holding its default ("").
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_metadata._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_metadata._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    # Only the optional "request_id" query param may remain unset here.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.set_metadata_unary(request)
            # With all required fields defaulted, no query params should be
            # forwarded to the underlying session call.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_metadata_unary_rest_unset_required_fields():
    """The set_metadata query-param set is disjoint from its required fields.

    `requestId` is optional and not among the required fields, so the
    intersection asserted below is empty.
    """
    # Fix: instantiate AnonymousCredentials — the original passed the class
    # object itself, unlike every other transport construction in this file.
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.set_metadata._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",)) & set(("instance", "metadataResource", "project", "zone",))
    )
def test_set_metadata_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.SetMetadataInstanceRequest
):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials(),
    )
    # Build a request that transcodes cleanly onto the http rule.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "metadata_resource": {
                "fingerprint": "fingerprint_value",
                "items": [{"key": "key_value", "value": "value_value"}],
                "kind": "kind_value",
            },
        }
    )
    # Fake a 400 reply from the underlying session and expect BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.set_metadata_unary(request)
def test_set_metadata_unary_rest_flattened():
    """Flattened-argument calls are routed to the setMetadata URI."""
    client = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the underlying HTTP session and hand back a canned 200 reply.
    with mock.patch.object(type(client.transport._session), "request") as req:
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = http_response
        # Truthy flattened fields, then overlay path params that satisfy
        # the http rule for this method.
        flattened_kwargs = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
            "metadata_resource": compute.Metadata(fingerprint="fingerprint_value"),
        }
        flattened_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.set_metadata_unary(**flattened_kwargs)
        # Exactly one HTTP call whose URI matches the expected template.
        assert len(req.mock_calls) == 1
        _, call_args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMetadata"
            % client.transport._host,
            call_args[1],
        )
def test_set_metadata_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields raises ValueError."""
    client = InstancesClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        # Supplying both a request object and flattened kwargs is ambiguous,
        # so the client must reject the call.
        client.set_metadata_unary(
            compute.SetMetadataInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            metadata_resource=compute.Metadata(fingerprint="fingerprint_value"),
        )
def test_set_metadata_unary_rest_error():
    """Smoke test: a REST-transport client can be constructed."""
    client = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize(
    "request_type", [compute.SetMinCpuPlatformInstanceRequest, dict,]
)
def test_set_min_cpu_platform_unary_rest(request_type):
    """Happy path: set_min_cpu_platform_unary parses the REST reply into an Operation."""
    client = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials(),
    )
    # A request that transcodes cleanly onto the http rule for this method.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "instances_set_min_cpu_platform_request_resource": {
                "min_cpu_platform": "min_cpu_platform_value"
            },
        }
    )
    # Every scalar field the mocked server reply should round-trip.
    expected_fields = {
        "client_operation_id": "client_operation_id_value",
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "end_time": "end_time_value",
        "http_error_message": "http_error_message_value",
        "http_error_status_code": 2374,
        "id": 205,
        "insert_time": "insert_time_value",
        "kind": "kind_value",
        "name": "name_value",
        "operation_group_id": "operation_group_id_value",
        "operation_type": "operation_type_value",
        "progress": 885,
        "region": "region_value",
        "self_link": "self_link_value",
        "start_time": "start_time_value",
        "status": compute.Operation.Status.DONE,
        "status_message": "status_message_value",
        "target_id": 947,
        "target_link": "target_link_value",
        "user": "user_value",
        "zone": "zone_value",
    }
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Operation.to_json(
            compute.Operation(**expected_fields)
        ).encode("UTF-8")
        req.return_value = http_response
        response = client.set_min_cpu_platform_unary(request)
    # The payload must deserialize into an Operation with the same values.
    assert isinstance(response, compute.Operation)
    for field, want in expected_fields.items():
        assert getattr(response, field) == want
def test_set_min_cpu_platform_unary_rest_required_fields(
    request_type=compute.SetMinCpuPlatformInstanceRequest,
):
    """Exercise required-field handling for set_min_cpu_platform.

    Verifies that default-valued required fields are dropped during
    serialization, that explicitly set values survive
    ``_get_unset_required_fields``, and that no stray query params reach
    the transport call.
    """
    transport_class = transports.InstancesRestTransport
    # Start with every required field present but holding its default ("").
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_min_cpu_platform._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_min_cpu_platform._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    # Only the optional "request_id" query param may remain unset here.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.set_min_cpu_platform_unary(request)
            # With all required fields defaulted, no query params should be
            # forwarded to the underlying session call.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_min_cpu_platform_unary_rest_unset_required_fields():
    """The set_min_cpu_platform query-param set is disjoint from its required fields.

    `requestId` is optional and not among the required fields, so the
    intersection asserted below is empty.
    """
    # Fix: instantiate AnonymousCredentials — the original passed the class
    # object itself, unlike every other transport construction in this file.
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.set_min_cpu_platform._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "instance",
                "instancesSetMinCpuPlatformRequestResource",
                "project",
                "zone",
            )
        )
    )
def test_set_min_cpu_platform_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.SetMinCpuPlatformInstanceRequest
):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials(),
    )
    # Build a request that transcodes cleanly onto the http rule.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "instances_set_min_cpu_platform_request_resource": {
                "min_cpu_platform": "min_cpu_platform_value"
            },
        }
    )
    # Fake a 400 reply from the underlying session and expect BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.set_min_cpu_platform_unary(request)
def test_set_min_cpu_platform_unary_rest_flattened():
    """Flattened-argument calls are routed to the setMinCpuPlatform URI."""
    client = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the underlying HTTP session and hand back a canned 200 reply.
    with mock.patch.object(type(client.transport._session), "request") as req:
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = http_response
        # Truthy flattened fields, then overlay path params that satisfy
        # the http rule for this method.
        flattened_kwargs = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
            "instances_set_min_cpu_platform_request_resource": compute.InstancesSetMinCpuPlatformRequest(
                min_cpu_platform="min_cpu_platform_value"
            ),
        }
        flattened_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.set_min_cpu_platform_unary(**flattened_kwargs)
        # Exactly one HTTP call whose URI matches the expected template.
        assert len(req.mock_calls) == 1
        _, call_args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMinCpuPlatform"
            % client.transport._host,
            call_args[1],
        )
def test_set_min_cpu_platform_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields raises ValueError."""
    client = InstancesClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        # Supplying both a request object and flattened kwargs is ambiguous,
        # so the client must reject the call.
        client.set_min_cpu_platform_unary(
            compute.SetMinCpuPlatformInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_set_min_cpu_platform_request_resource=compute.InstancesSetMinCpuPlatformRequest(
                min_cpu_platform="min_cpu_platform_value"
            ),
        )
def test_set_min_cpu_platform_unary_rest_error():
    """Smoke test: a REST-transport client can be constructed."""
    client = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize("request_type", [compute.SetSchedulingInstanceRequest, dict,])
def test_set_scheduling_unary_rest(request_type):
    """Happy path: set_scheduling_unary parses the REST reply into an Operation."""
    client = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials(),
    )
    # A request that transcodes cleanly onto the http rule for this method.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "scheduling_resource": {
                "automatic_restart": True,
                "location_hint": "location_hint_value",
                "min_node_cpus": 1379,
                "node_affinities": [
                    {
                        "key": "key_value",
                        "operator": "operator_value",
                        "values": ["values_value_1", "values_value_2"],
                    }
                ],
                "on_host_maintenance": "on_host_maintenance_value",
                "preemptible": True,
            },
        }
    )
    # Every scalar field the mocked server reply should round-trip.
    expected_fields = {
        "client_operation_id": "client_operation_id_value",
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "end_time": "end_time_value",
        "http_error_message": "http_error_message_value",
        "http_error_status_code": 2374,
        "id": 205,
        "insert_time": "insert_time_value",
        "kind": "kind_value",
        "name": "name_value",
        "operation_group_id": "operation_group_id_value",
        "operation_type": "operation_type_value",
        "progress": 885,
        "region": "region_value",
        "self_link": "self_link_value",
        "start_time": "start_time_value",
        "status": compute.Operation.Status.DONE,
        "status_message": "status_message_value",
        "target_id": 947,
        "target_link": "target_link_value",
        "user": "user_value",
        "zone": "zone_value",
    }
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Operation.to_json(
            compute.Operation(**expected_fields)
        ).encode("UTF-8")
        req.return_value = http_response
        response = client.set_scheduling_unary(request)
    # The payload must deserialize into an Operation with the same values.
    assert isinstance(response, compute.Operation)
    for field, want in expected_fields.items():
        assert getattr(response, field) == want
def test_set_scheduling_unary_rest_required_fields(
    request_type=compute.SetSchedulingInstanceRequest,
):
    """Exercise required-field handling for set_scheduling.

    Verifies that default-valued required fields are dropped during
    serialization, that explicitly set values survive
    ``_get_unset_required_fields``, and that no stray query params reach
    the transport call.
    """
    transport_class = transports.InstancesRestTransport
    # Start with every required field present but holding its default ("").
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_scheduling._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_scheduling._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    # Only the optional "request_id" query param may remain unset here.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.set_scheduling_unary(request)
            # With all required fields defaulted, no query params should be
            # forwarded to the underlying session call.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_scheduling_unary_rest_unset_required_fields():
    """The set_scheduling query-param set is disjoint from its required fields.

    `requestId` is optional and not among the required fields, so the
    intersection asserted below is empty.
    """
    # Fix: instantiate AnonymousCredentials — the original passed the class
    # object itself, unlike every other transport construction in this file.
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.set_scheduling._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",))
        & set(("instance", "project", "schedulingResource", "zone",))
    )
def test_set_scheduling_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.SetSchedulingInstanceRequest
):
    """An HTTP 400 from the server surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials(),
    )
    # Build a request that transcodes cleanly onto the http rule.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "scheduling_resource": {
                "automatic_restart": True,
                "location_hint": "location_hint_value",
                "min_node_cpus": 1379,
                "node_affinities": [
                    {
                        "key": "key_value",
                        "operator": "operator_value",
                        "values": ["values_value_1", "values_value_2"],
                    }
                ],
                "on_host_maintenance": "on_host_maintenance_value",
                "preemptible": True,
            },
        }
    )
    # Fake a 400 reply from the underlying session and expect BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.set_scheduling_unary(request)
def test_set_scheduling_unary_rest_flattened():
    """Flattened-argument calls are routed to the setScheduling URI."""
    client = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the underlying HTTP session and hand back a canned 200 reply.
    with mock.patch.object(type(client.transport._session), "request") as req:
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Operation.to_json(
            compute.Operation()
        ).encode("UTF-8")
        req.return_value = http_response
        # Truthy flattened fields, then overlay path params that satisfy
        # the http rule for this method.
        flattened_kwargs = {
            "project": "project_value",
            "zone": "zone_value",
            "instance": "instance_value",
            "scheduling_resource": compute.Scheduling(automatic_restart=True),
        }
        flattened_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.set_scheduling_unary(**flattened_kwargs)
        # Exactly one HTTP call whose URI matches the expected template.
        assert len(req.mock_calls) == 1
        _, call_args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setScheduling"
            % client.transport._host,
            call_args[1],
        )
def test_set_scheduling_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields raises ValueError."""
    client = InstancesClient(
        transport=transport, credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        # Supplying both a request object and flattened kwargs is ambiguous,
        # so the client must reject the call.
        client.set_scheduling_unary(
            compute.SetSchedulingInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            scheduling_resource=compute.Scheduling(automatic_restart=True),
        )
def test_set_scheduling_unary_rest_error():
    """Smoke test: a REST-transport client can be constructed."""
    client = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials()
    )
@pytest.mark.parametrize(
    "request_type", [compute.SetServiceAccountInstanceRequest, dict,]
)
def test_set_service_account_unary_rest(request_type):
    """Happy path: set_service_account_unary parses the REST reply into an Operation."""
    client = InstancesClient(
        transport="rest", credentials=ga_credentials.AnonymousCredentials(),
    )
    # A request that transcodes cleanly onto the http rule for this method.
    request = request_type(
        {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
            "instances_set_service_account_request_resource": {
                "email": "email_value",
                "scopes": ["scopes_value_1", "scopes_value_2"],
            },
        }
    )
    # Every scalar field the mocked server reply should round-trip.
    expected_fields = {
        "client_operation_id": "client_operation_id_value",
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "end_time": "end_time_value",
        "http_error_message": "http_error_message_value",
        "http_error_status_code": 2374,
        "id": 205,
        "insert_time": "insert_time_value",
        "kind": "kind_value",
        "name": "name_value",
        "operation_group_id": "operation_group_id_value",
        "operation_type": "operation_type_value",
        "progress": 885,
        "region": "region_value",
        "self_link": "self_link_value",
        "start_time": "start_time_value",
        "status": compute.Operation.Status.DONE,
        "status_message": "status_message_value",
        "target_id": 947,
        "target_link": "target_link_value",
        "user": "user_value",
        "zone": "zone_value",
    }
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Operation.to_json(
            compute.Operation(**expected_fields)
        ).encode("UTF-8")
        req.return_value = http_response
        response = client.set_service_account_unary(request)
    # The payload must deserialize into an Operation with the same values.
    assert isinstance(response, compute.Operation)
    for field, want in expected_fields.items():
        assert getattr(response, field) == want
def test_set_service_account_unary_rest_required_fields(
    request_type=compute.SetServiceAccountInstanceRequest,
):
    """Verify required-field handling for ``set_service_account`` over REST.

    Builds a request whose required fields hold default (empty) values,
    checks that the transport drops them from the jsonified request, then
    re-populates them and issues the call with ``path_template.transcode``
    mocked so the resulting query params can be asserted directly.
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_service_account._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_service_account._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.set_service_account_unary(request)
            # All required fields were given real values, so no extra query
            # params should be appended by the transport.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_service_account_unary_rest_unset_required_fields():
    """Required fields must never be reported as unset query parameters.

    ``requestId`` is the only optional query parameter for this method;
    intersecting it with the required-field set must yield the empty set.
    """
    transport = transports.InstancesRestTransport(
        # Instantiate the credentials; the original passed the class object
        # itself, inconsistent with every other AnonymousCredentials() call
        # in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.set_service_account._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "instance",
                "instancesSetServiceAccountRequestResource",
                "project",
                "zone",
            )
        )
    )
def test_set_service_account_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.SetServiceAccountInstanceRequest
):
    """An HTTP 400 from the server surfaces as ``core_exceptions.BadRequest``."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request that satisfies transcoding for this method.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "instances_set_service_account_request_resource": {
            "email": "email_value",
            "scopes": ["scopes_value_1", "scopes_value_2"],
        },
    }
    request = request_type(request_init)
    # Fake a 400 response from the mocked HTTP session.
    fake_response = Response()
    fake_response.status_code = 400
    fake_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = fake_response
        with pytest.raises(core_exceptions.BadRequest):
            client.set_service_account_unary(request)
def test_set_service_account_unary_rest_flattened():
    """Flattened arguments must be transcoded onto the setServiceAccount URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Fake a successful Operation response on the mocked session.
        op = compute.Operation()
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(op).encode("UTF-8")
        req.return_value = fake_response
        # Truthy values for every flattened field, then overlay path params
        # that satisfy the http rule for this method.
        call_kwargs = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest(
                email="email_value"
            ),
        )
        call_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.set_service_account_unary(**call_kwargs)
        # Exactly one HTTP call was made, against the expected URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount"
            % client.transport._host,
            args[1],
        )
def test_set_service_account_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest(
            email="email_value"
        ),
    )
    # Supplying both forms at once is rejected before any HTTP work happens.
    with pytest.raises(ValueError):
        client.set_service_account_unary(
            compute.SetServiceAccountInstanceRequest(), **flattened
        )
def test_set_service_account_unary_rest_error():
    """Smoke test: constructing a REST-transport client does not raise."""
    InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, dict,]
)
def test_set_shielded_instance_integrity_policy_unary_rest(request_type):
    """Happy-path round trip for ``set_shielded_instance_integrity_policy``.

    Mocks the HTTP session to return a fully-populated ``compute.Operation``
    and asserts every field survives the JSON round trip back to the caller.
    Parametrized over the proto request type and a plain ``dict``.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["shielded_instance_integrity_policy_resource"] = {
        "update_auto_learn_policy": True
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.set_shielded_instance_integrity_policy_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_set_shielded_instance_integrity_policy_unary_rest_required_fields(
    request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest,
):
    """Verify required-field handling for the shielded-integrity-policy call.

    Same shape as the other ``*_required_fields`` tests: defaulted required
    fields are dropped, then re-populated, and the final call is made with
    ``transcode`` mocked so query params can be inspected.
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_shielded_instance_integrity_policy._get_unset_required_fields(
        jsonified_request
    )
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_shielded_instance_integrity_policy._get_unset_required_fields(
        jsonified_request
    )
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "patch",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.set_shielded_instance_integrity_policy_unary(request)
            # With all required fields populated, no extra query params remain.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_shielded_instance_integrity_policy_unary_rest_unset_required_fields():
    """Required fields must never be reported as unset query parameters.

    ``requestId`` is the only optional query parameter for this method;
    intersecting it with the required-field set must yield the empty set.
    """
    transport = transports.InstancesRestTransport(
        # Instantiate the credentials; the original passed the class object
        # itself, inconsistent with every other AnonymousCredentials() call
        # in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.set_shielded_instance_integrity_policy._get_unset_required_fields(
        {}
    )
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            ("instance", "project", "shieldedInstanceIntegrityPolicyResource", "zone",)
        )
    )
def test_set_shielded_instance_integrity_policy_unary_rest_bad_request(
    transport: str = "rest",
    request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest,
):
    """An HTTP 400 from the server surfaces as ``core_exceptions.BadRequest``."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request that satisfies transcoding for this method.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "shielded_instance_integrity_policy_resource": {
            "update_auto_learn_policy": True
        },
    }
    request = request_type(request_init)
    # Fake a 400 response from the mocked HTTP session.
    fake_response = Response()
    fake_response.status_code = 400
    fake_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = fake_response
        with pytest.raises(core_exceptions.BadRequest):
            client.set_shielded_instance_integrity_policy_unary(request)
def test_set_shielded_instance_integrity_policy_unary_rest_flattened():
    """Flattened arguments must be transcoded onto the expected URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Fake a successful Operation response on the mocked session.
        op = compute.Operation()
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(op).encode("UTF-8")
        req.return_value = fake_response
        # Truthy values for every flattened field, then overlay path params
        # that satisfy the http rule for this method.
        call_kwargs = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            shielded_instance_integrity_policy_resource=compute.ShieldedInstanceIntegrityPolicy(
                update_auto_learn_policy=True
            ),
        )
        call_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.set_shielded_instance_integrity_policy_unary(**call_kwargs)
        # Exactly one HTTP call was made, against the expected URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setShieldedInstanceIntegrityPolicy"
            % client.transport._host,
            args[1],
        )
def test_set_shielded_instance_integrity_policy_unary_rest_flattened_error(
    transport: str = "rest",
):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        shielded_instance_integrity_policy_resource=compute.ShieldedInstanceIntegrityPolicy(
            update_auto_learn_policy=True
        ),
    )
    # Supplying both forms at once is rejected before any HTTP work happens.
    with pytest.raises(ValueError):
        client.set_shielded_instance_integrity_policy_unary(
            compute.SetShieldedInstanceIntegrityPolicyInstanceRequest(), **flattened
        )
def test_set_shielded_instance_integrity_policy_unary_rest_error():
    """Smoke test: constructing a REST-transport client does not raise."""
    InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize("request_type", [compute.SetTagsInstanceRequest, dict,])
def test_set_tags_unary_rest(request_type):
    """Happy-path round trip for ``set_tags`` over the REST transport.

    Mocks the HTTP session to return a fully-populated ``compute.Operation``
    and asserts every field survives the JSON round trip back to the caller.
    Parametrized over the proto request type and a plain ``dict``.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["tags_resource"] = {
        "fingerprint": "fingerprint_value",
        "items": ["items_value_1", "items_value_2"],
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.set_tags_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_set_tags_unary_rest_required_fields(
    request_type=compute.SetTagsInstanceRequest,
):
    """Verify required-field handling for ``set_tags`` over REST.

    Same shape as the other ``*_required_fields`` tests: defaulted required
    fields are dropped, then re-populated, and the final call is made with
    ``transcode`` mocked so query params can be inspected.
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_tags._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).set_tags._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.set_tags_unary(request)
            # With all required fields populated, no extra query params remain.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_set_tags_unary_rest_unset_required_fields():
    """Required fields must never be reported as unset query parameters.

    ``requestId`` is the only optional query parameter for this method;
    intersecting it with the required-field set must yield the empty set.
    """
    transport = transports.InstancesRestTransport(
        # Instantiate the credentials; the original passed the class object
        # itself, inconsistent with every other AnonymousCredentials() call
        # in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.set_tags._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId",)) & set(("instance", "project", "tagsResource", "zone",))
    )
def test_set_tags_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.SetTagsInstanceRequest
):
    """An HTTP 400 from the server surfaces as ``core_exceptions.BadRequest``."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request that satisfies transcoding for this method.
    request_init = {
        "project": "sample1",
        "zone": "sample2",
        "instance": "sample3",
        "tags_resource": {
            "fingerprint": "fingerprint_value",
            "items": ["items_value_1", "items_value_2"],
        },
    }
    request = request_type(request_init)
    # Fake a 400 response from the mocked HTTP session.
    fake_response = Response()
    fake_response.status_code = 400
    fake_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = fake_response
        with pytest.raises(core_exceptions.BadRequest):
            client.set_tags_unary(request)
def test_set_tags_unary_rest_flattened():
    """Flattened arguments must be transcoded onto the setTags URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Fake a successful Operation response on the mocked session.
        op = compute.Operation()
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(op).encode("UTF-8")
        req.return_value = fake_response
        # Truthy values for every flattened field, then overlay path params
        # that satisfy the http rule for this method.
        call_kwargs = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            tags_resource=compute.Tags(fingerprint="fingerprint_value"),
        )
        call_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.set_tags_unary(**call_kwargs)
        # Exactly one HTTP call was made, against the expected URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setTags"
            % client.transport._host,
            args[1],
        )
def test_set_tags_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        tags_resource=compute.Tags(fingerprint="fingerprint_value"),
    )
    # Supplying both forms at once is rejected before any HTTP work happens.
    with pytest.raises(ValueError):
        client.set_tags_unary(compute.SetTagsInstanceRequest(), **flattened)
def test_set_tags_unary_rest_error():
    """Smoke test: constructing a REST-transport client does not raise."""
    InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.SimulateMaintenanceEventInstanceRequest, dict,]
)
def test_simulate_maintenance_event_unary_rest(request_type):
    """Happy-path round trip for ``simulate_maintenance_event`` over REST.

    Mocks the HTTP session to return a fully-populated ``compute.Operation``
    and asserts every field survives the JSON round trip back to the caller.
    Parametrized over the proto request type and a plain ``dict``. Unlike the
    sibling tests, no body resource is set — this request carries path
    parameters only.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.simulate_maintenance_event_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_simulate_maintenance_event_unary_rest_required_fields(
    request_type=compute.SimulateMaintenanceEventInstanceRequest,
):
    """Verify required-field handling for ``simulate_maintenance_event``.

    Same shape as the other ``*_required_fields`` tests: defaulted required
    fields are dropped, then re-populated, and the final call is made with
    ``transcode`` mocked so query params can be inspected. Note that no
    body is attached to the transcode result here — presumably this method
    sends no request body, matching the absence of the body/path mixing
    check found in the sibling tests.
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).simulate_maintenance_event._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).simulate_maintenance_event._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.simulate_maintenance_event_unary(request)
            # With all required fields populated, no extra query params remain.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_simulate_maintenance_event_unary_rest_unset_required_fields():
    """Required fields must never be reported as unset query parameters.

    This method has no optional query parameters at all, so the
    intersection with the required-field set is trivially empty.
    """
    transport = transports.InstancesRestTransport(
        # Instantiate the credentials; the original passed the class object
        # itself, inconsistent with every other AnonymousCredentials() call
        # in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.simulate_maintenance_event._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("instance", "project", "zone",)))
def test_simulate_maintenance_event_unary_rest_bad_request(
    transport: str = "rest",
    request_type=compute.SimulateMaintenanceEventInstanceRequest,
):
    """An HTTP 400 from the server surfaces as ``core_exceptions.BadRequest``."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request that satisfies transcoding for this method.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Fake a 400 response from the mocked HTTP session.
    fake_response = Response()
    fake_response.status_code = 400
    fake_response.request = Request()
    with mock.patch.object(Session, "request") as req:
        req.return_value = fake_response
        with pytest.raises(core_exceptions.BadRequest):
            client.simulate_maintenance_event_unary(request)
def test_simulate_maintenance_event_unary_rest_flattened():
    """Flattened arguments must be transcoded onto the expected URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Fake a successful Operation response on the mocked session.
        op = compute.Operation()
        fake_response = Response()
        fake_response.status_code = 200
        fake_response._content = compute.Operation.to_json(op).encode("UTF-8")
        req.return_value = fake_response
        # Truthy values for every flattened field, then overlay path params
        # that satisfy the http rule for this method.
        call_kwargs = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        call_kwargs.update(
            {"project": "sample1", "zone": "sample2", "instance": "sample3"}
        )
        client.simulate_maintenance_event_unary(**call_kwargs)
        # Exactly one HTTP call was made, against the expected URI template.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/simulateMaintenanceEvent"
            % client.transport._host,
            args[1],
        )
def test_simulate_maintenance_event_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Supplying both a request object and flattened fields is rejected.
    with pytest.raises(ValueError):
        client.simulate_maintenance_event_unary(
            compute.SimulateMaintenanceEventInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_simulate_maintenance_event_unary_rest_error():
    # Smoke test: constructing a REST-transport client must not raise.
    # No request is issued and nothing further is asserted.
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize("request_type", [compute.StartInstanceRequest, dict,])
def test_start_unary_rest(request_type):
    """start: a successful REST call deserializes to a populated Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # A request that satisfies transcoding for this method.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Truthy value for every Operation field expected to round-trip.
    expected_fields = {
        "client_operation_id": "client_operation_id_value",
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "end_time": "end_time_value",
        "http_error_message": "http_error_message_value",
        "http_error_status_code": 2374,
        "id": 205,
        "insert_time": "insert_time_value",
        "kind": "kind_value",
        "name": "name_value",
        "operation_group_id": "operation_group_id_value",
        "operation_type": "operation_type_value",
        "progress": 885,
        "region": "region_value",
        "self_link": "self_link_value",
        "start_time": "start_time_value",
        "status": compute.Operation.Status.DONE,
        "status_message": "status_message_value",
        "target_id": 947,
        "target_link": "target_link_value",
        "user": "user_value",
        "zone": "zone_value",
    }
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        return_value = compute.Operation(**expected_fields)
        # Wrap the canned value into a proper Response obj.
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(return_value).encode(
            "UTF-8"
        )
        req.return_value = response_value
        response = client.start_unary(request)
    # Establish that the response is the type we expect, with every field intact.
    assert isinstance(response, compute.Operation)
    for field, value in expected_fields.items():
        assert getattr(response, field) == value
def test_start_unary_rest_required_fields(request_type=compute.StartInstanceRequest):
    # Exercise the generated required-field machinery for `start`:
    # default-valued required fields are dropped from the JSON form,
    # re-added with truthy values, and the final mocked call sends no
    # query parameters because transcoding is stubbed out.
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).start._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).start._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.start_unary(request)

            # With transcode stubbed, no params should reach the session.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_start_unary_rest_unset_required_fields():
    """start: unset-required-fields of an empty request, minus path params."""
    transport = transports.InstancesRestTransport(
        # Instantiate the credentials: the original passed the class object
        # itself, inconsistent with every other construction in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.start._get_unset_required_fields({})
    # {"requestId"} intersected with the required params is the empty set.
    assert set(unset_fields) == (
        set(("requestId",)) & set(("instance", "project", "zone",))
    )
def test_start_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.StartInstanceRequest
):
    """start: an HTTP 400 surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request that satisfies transcoding for this method.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Fake a 400 from the underlying session and expect BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.start_unary(request)
def test_start_unary_rest_flattened():
    """start: flattened-argument call must issue one request to the expected URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Canned Operation wrapped in a fake 200 HTTP response.
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Operation.to_json(compute.Operation()).encode(
            "UTF-8"
        )
        req.return_value = http_response
        # Path parameters that satisfy an http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy value for each flattened field.
        flattened = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        flattened.update(sample_request)
        client.start_unary(**flattened)
        # Exactly one underlying HTTP call, aimed at the expected URI.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_uri = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/start"
            % client.transport._host
        )
        assert path_template.validate(expected_uri, args[1])
def test_start_unary_rest_flattened_error(transport: str = "rest"):
    """start: mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Supplying both a request object and flattened fields is rejected.
    with pytest.raises(ValueError):
        client.start_unary(
            compute.StartInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_start_unary_rest_error():
    # Smoke test: constructing a REST-transport client must not raise.
    # No request is issued and nothing further is asserted.
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.StartWithEncryptionKeyInstanceRequest, dict,]
)
def test_start_with_encryption_key_unary_rest(request_type):
    """startWithEncryptionKey: successful REST call yields a populated Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # A request that satisfies transcoding, including the body resource.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["instances_start_with_encryption_key_request_resource"] = {
        "disks": [
            {
                "disk_encryption_key": {
                    "kms_key_name": "kms_key_name_value",
                    "kms_key_service_account": "kms_key_service_account_value",
                    "raw_key": "raw_key_value",
                    "rsa_encrypted_key": "rsa_encrypted_key_value",
                    "sha256": "sha256_value",
                },
                "source": "source_value",
            }
        ]
    }
    request = request_type(request_init)
    # Truthy value for every Operation field expected to round-trip.
    expected_fields = {
        "client_operation_id": "client_operation_id_value",
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "end_time": "end_time_value",
        "http_error_message": "http_error_message_value",
        "http_error_status_code": 2374,
        "id": 205,
        "insert_time": "insert_time_value",
        "kind": "kind_value",
        "name": "name_value",
        "operation_group_id": "operation_group_id_value",
        "operation_type": "operation_type_value",
        "progress": 885,
        "region": "region_value",
        "self_link": "self_link_value",
        "start_time": "start_time_value",
        "status": compute.Operation.Status.DONE,
        "status_message": "status_message_value",
        "target_id": 947,
        "target_link": "target_link_value",
        "user": "user_value",
        "zone": "zone_value",
    }
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        return_value = compute.Operation(**expected_fields)
        # Wrap the canned value into a proper Response obj.
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(return_value).encode(
            "UTF-8"
        )
        req.return_value = response_value
        response = client.start_with_encryption_key_unary(request)
    # Establish that the response is the type we expect, with every field intact.
    assert isinstance(response, compute.Operation)
    for field, value in expected_fields.items():
        assert getattr(response, field) == value
def test_start_with_encryption_key_unary_rest_required_fields(
    request_type=compute.StartWithEncryptionKeyInstanceRequest,
):
    # Exercise the generated required-field machinery for
    # `start_with_encryption_key`: default-valued required fields are
    # dropped from the JSON form, re-added with truthy values, and the
    # final mocked call sends no query parameters.
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).start_with_encryption_key._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).start_with_encryption_key._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            # This method carries an http body; give transcode an empty one.
            transcode_result["body"] = {}
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.start_with_encryption_key_unary(request)

            # With transcode stubbed, no params should reach the session.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_start_with_encryption_key_unary_rest_unset_required_fields():
    """startWithEncryptionKey: unset-required-fields of an empty request."""
    transport = transports.InstancesRestTransport(
        # Instantiate the credentials: the original passed the class object
        # itself, inconsistent with every other construction in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.start_with_encryption_key._get_unset_required_fields({})
    # {"requestId"} intersected with the required params is the empty set.
    assert set(unset_fields) == (
        set(("requestId",))
        & set(
            (
                "instance",
                "instancesStartWithEncryptionKeyRequestResource",
                "project",
                "zone",
            )
        )
    )
def test_start_with_encryption_key_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.StartWithEncryptionKeyInstanceRequest
):
    """startWithEncryptionKey: an HTTP 400 surfaces as BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request that satisfies transcoding, including the body resource.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["instances_start_with_encryption_key_request_resource"] = {
        "disks": [
            {
                "disk_encryption_key": {
                    "kms_key_name": "kms_key_name_value",
                    "kms_key_service_account": "kms_key_service_account_value",
                    "raw_key": "raw_key_value",
                    "rsa_encrypted_key": "rsa_encrypted_key_value",
                    "sha256": "sha256_value",
                },
                "source": "source_value",
            }
        ]
    }
    request = request_type(request_init)
    # Fake a 400 from the underlying session and expect BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.start_with_encryption_key_unary(request)
def test_start_with_encryption_key_unary_rest_flattened():
    """startWithEncryptionKey: flattened call must hit the expected URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Canned Operation wrapped in a fake 200 HTTP response.
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Operation.to_json(compute.Operation()).encode(
            "UTF-8"
        )
        req.return_value = http_response
        # Path parameters that satisfy an http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy value for each flattened field.
        flattened = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_start_with_encryption_key_request_resource=compute.InstancesStartWithEncryptionKeyRequest(
                disks=[
                    compute.CustomerEncryptionKeyProtectedDisk(
                        disk_encryption_key=compute.CustomerEncryptionKey(
                            kms_key_name="kms_key_name_value"
                        )
                    )
                ]
            ),
        )
        flattened.update(sample_request)
        client.start_with_encryption_key_unary(**flattened)
        # Exactly one underlying HTTP call, aimed at the expected URI.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_uri = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/startWithEncryptionKey"
            % client.transport._host
        )
        assert path_template.validate(expected_uri, args[1])
def test_start_with_encryption_key_unary_rest_flattened_error(transport: str = "rest"):
    """startWithEncryptionKey: request object plus flattened fields must raise."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Supplying both a request object and flattened fields is rejected.
    with pytest.raises(ValueError):
        client.start_with_encryption_key_unary(
            compute.StartWithEncryptionKeyInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instances_start_with_encryption_key_request_resource=compute.InstancesStartWithEncryptionKeyRequest(
                disks=[
                    compute.CustomerEncryptionKeyProtectedDisk(
                        disk_encryption_key=compute.CustomerEncryptionKey(
                            kms_key_name="kms_key_name_value"
                        )
                    )
                ]
            ),
        )
def test_start_with_encryption_key_unary_rest_error():
    # Smoke test: constructing a REST-transport client must not raise.
    # No request is issued and nothing further is asserted.
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize("request_type", [compute.StopInstanceRequest, dict,])
def test_stop_unary_rest(request_type):
    """stop: a successful REST call deserializes to a populated Operation."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # A request that satisfies transcoding for this method.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Truthy value for every Operation field expected to round-trip.
    expected_fields = {
        "client_operation_id": "client_operation_id_value",
        "creation_timestamp": "creation_timestamp_value",
        "description": "description_value",
        "end_time": "end_time_value",
        "http_error_message": "http_error_message_value",
        "http_error_status_code": 2374,
        "id": 205,
        "insert_time": "insert_time_value",
        "kind": "kind_value",
        "name": "name_value",
        "operation_group_id": "operation_group_id_value",
        "operation_type": "operation_type_value",
        "progress": 885,
        "region": "region_value",
        "self_link": "self_link_value",
        "start_time": "start_time_value",
        "status": compute.Operation.Status.DONE,
        "status_message": "status_message_value",
        "target_id": 947,
        "target_link": "target_link_value",
        "user": "user_value",
        "zone": "zone_value",
    }
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        return_value = compute.Operation(**expected_fields)
        # Wrap the canned value into a proper Response obj.
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.Operation.to_json(return_value).encode(
            "UTF-8"
        )
        req.return_value = response_value
        response = client.stop_unary(request)
    # Establish that the response is the type we expect, with every field intact.
    assert isinstance(response, compute.Operation)
    for field, value in expected_fields.items():
        assert getattr(response, field) == value
def test_stop_unary_rest_required_fields(request_type=compute.StopInstanceRequest):
    # Exercise the generated required-field machinery for `stop`:
    # default-valued required fields are dropped from the JSON form,
    # re-added with truthy values, and the final mocked call sends no
    # query parameters because transcoding is stubbed out.
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).stop._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).stop._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.stop_unary(request)

            # With transcode stubbed, no params should reach the session.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_stop_unary_rest_unset_required_fields():
    """stop: unset-required-fields of an empty request, minus path params."""
    transport = transports.InstancesRestTransport(
        # Instantiate the credentials: the original passed the class object
        # itself, inconsistent with every other construction in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.stop._get_unset_required_fields({})
    # {"requestId"} intersected with the required params is the empty set.
    assert set(unset_fields) == (
        set(("requestId",)) & set(("instance", "project", "zone",))
    )
def test_stop_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.StopInstanceRequest
):
    """stop: an HTTP 400 surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request that satisfies transcoding for this method.
    request = request_type(
        {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    )
    # Fake a 400 from the underlying session and expect BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.stop_unary(request)
def test_stop_unary_rest_flattened():
    """stop: flattened-argument call must issue one request to the expected URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Canned Operation wrapped in a fake 200 HTTP response.
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Operation.to_json(compute.Operation()).encode(
            "UTF-8"
        )
        req.return_value = http_response
        # Path parameters that satisfy an http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # Truthy value for each flattened field.
        flattened = dict(
            project="project_value", zone="zone_value", instance="instance_value",
        )
        flattened.update(sample_request)
        client.stop_unary(**flattened)
        # Exactly one underlying HTTP call, aimed at the expected URI.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        expected_uri = (
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/stop"
            % client.transport._host
        )
        assert path_template.validate(expected_uri, args[1])
def test_stop_unary_rest_flattened_error(transport: str = "rest"):
    """stop: mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Supplying both a request object and flattened fields is rejected.
    with pytest.raises(ValueError):
        client.stop_unary(
            compute.StopInstanceRequest(),
            project="project_value",
            zone="zone_value",
            instance="instance_value",
        )
def test_stop_unary_rest_error():
    # Smoke test: constructing a REST-transport client must not raise.
    # No request is issued and nothing further is asserted.
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.TestIamPermissionsInstanceRequest, dict,]
)
def test_test_iam_permissions_rest(request_type):
    """testIamPermissions: successful call returns the permissions list."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # A request that satisfies transcoding, including the body resource.
    request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    request_init["test_permissions_request_resource"] = {
        "permissions": ["permissions_value_1", "permissions_value_2"]
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        canned = compute.TestPermissionsResponse(permissions=["permissions_value"],)
        # Wrap the canned value into a proper Response obj.
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.TestPermissionsResponse.to_json(
            canned
        ).encode("UTF-8")
        req.return_value = response_value
        response = client.test_iam_permissions(request)
    # Establish that the response is the type we expect.
    assert isinstance(response, compute.TestPermissionsResponse)
    assert response.permissions == ["permissions_value"]
def test_test_iam_permissions_rest_required_fields(
    request_type=compute.TestIamPermissionsInstanceRequest,
):
    # Exercise the generated required-field machinery for
    # `test_iam_permissions`: default-valued required fields are dropped
    # from the JSON form, re-added with truthy values, and the final
    # mocked call sends no query parameters.
    transport_class = transports.InstancesRestTransport

    request_init = {}
    request_init["project"] = ""
    request_init["resource"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )

    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).test_iam_permissions._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    jsonified_request["project"] = "project_value"
    jsonified_request["resource"] = "resource_value"
    jsonified_request["zone"] = "zone_value"

    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).test_iam_permissions._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "resource" in jsonified_request
    assert jsonified_request["resource"] == "resource_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"

    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)

    # Designate an appropriate value for the returned response.
    return_value = compute.TestPermissionsResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            # This method carries an http body; give transcode an empty one.
            transcode_result["body"] = {}
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.TestPermissionsResponse.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value

            response = client.test_iam_permissions(request)

            # With transcode stubbed, no params should reach the session.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_test_iam_permissions_rest_unset_required_fields():
    """testIamPermissions: unset-required-fields of an empty request."""
    transport = transports.InstancesRestTransport(
        # Instantiate the credentials: the original passed the class object
        # itself, inconsistent with every other construction in this file.
        credentials=ga_credentials.AnonymousCredentials()
    )

    unset_fields = transport.test_iam_permissions._get_unset_required_fields({})
    # The empty set intersected with the required params is the empty set.
    assert set(unset_fields) == (
        set(())
        & set(("project", "resource", "testPermissionsRequestResource", "zone",))
    )
def test_test_iam_permissions_rest_bad_request(
    transport: str = "rest", request_type=compute.TestIamPermissionsInstanceRequest
):
    """testIamPermissions: an HTTP 400 surfaces as BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # A request that satisfies transcoding, including the body resource.
    request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"}
    request_init["test_permissions_request_resource"] = {
        "permissions": ["permissions_value_1", "permissions_value_2"]
    }
    request = request_type(request_init)
    # Fake a 400 from the underlying session and expect BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.test_iam_permissions(request)
def test_test_iam_permissions_rest_flattened():
    """Flattened arguments are routed onto the expected REST URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Intercept the session-level HTTP request and hand back a canned 200.
    with mock.patch.object(type(client.transport._session), "request") as req:
        canned = compute.TestPermissionsResponse()
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.TestPermissionsResponse.to_json(
            canned
        ).encode("UTF-8")
        req.return_value = http_response
        # Path parameters that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "resource": "sample3",
        }
        # One truthy value per flattened field.
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            resource="resource_value",
            test_permissions_request_resource=compute.TestPermissionsRequest(
                permissions=["permissions_value"]
            ),
        )
        mock_args.update(sample_request)
        client.test_iam_permissions(**mock_args)
        # Exactly one HTTP call was made, aimed at testIamPermissions.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/testIamPermissions"
            % client.transport._host,
            args[1],
        )
def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields raises ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened_fields = dict(
        project="project_value",
        zone="zone_value",
        resource="resource_value",
        test_permissions_request_resource=compute.TestPermissionsRequest(
            permissions=["permissions_value"]
        ),
    )
    # Supplying both forms at once is an API-usage error.
    with pytest.raises(ValueError):
        client.test_iam_permissions(
            compute.TestIamPermissionsInstanceRequest(), **flattened_fields
        )
def test_test_iam_permissions_rest_error():
    """Constructing a REST client for this method does not raise."""
    InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize("request_type", [compute.UpdateInstanceRequest, dict,])
def test_update_unary_rest(request_type):
    """update_unary parses the mocked REST payload into a compute.Operation.

    Parametrized over both the typed request class and a plain dict to
    confirm the client accepts either form.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    # Fully-populated Instance body; exercises every nested message field.
    request_init["instance_resource"] = {
        "advanced_machine_features": {
            "enable_nested_virtualization": True,
            "threads_per_core": 1689,
        },
        "can_ip_forward": True,
        "confidential_instance_config": {"enable_confidential_compute": True},
        "cpu_platform": "cpu_platform_value",
        "creation_timestamp": "creation_timestamp_value",
        "deletion_protection": True,
        "description": "description_value",
        "disks": [
            {
                "auto_delete": True,
                "boot": True,
                "device_name": "device_name_value",
                "disk_encryption_key": {
                    "kms_key_name": "kms_key_name_value",
                    "kms_key_service_account": "kms_key_service_account_value",
                    "raw_key": "raw_key_value",
                    "rsa_encrypted_key": "rsa_encrypted_key_value",
                    "sha256": "sha256_value",
                },
                "disk_size_gb": 1261,
                "guest_os_features": [{"type_": "type__value"}],
                "index": 536,
                "initialize_params": {
                    "description": "description_value",
                    "disk_name": "disk_name_value",
                    "disk_size_gb": 1261,
                    "disk_type": "disk_type_value",
                    "labels": {},
                    "on_update_action": "on_update_action_value",
                    "provisioned_iops": 1740,
                    "resource_policies": [
                        "resource_policies_value_1",
                        "resource_policies_value_2",
                    ],
                    "source_image": "source_image_value",
                    "source_image_encryption_key": {
                        "kms_key_name": "kms_key_name_value",
                        "kms_key_service_account": "kms_key_service_account_value",
                        "raw_key": "raw_key_value",
                        "rsa_encrypted_key": "rsa_encrypted_key_value",
                        "sha256": "sha256_value",
                    },
                    "source_snapshot": "source_snapshot_value",
                    "source_snapshot_encryption_key": {
                        "kms_key_name": "kms_key_name_value",
                        "kms_key_service_account": "kms_key_service_account_value",
                        "raw_key": "raw_key_value",
                        "rsa_encrypted_key": "rsa_encrypted_key_value",
                        "sha256": "sha256_value",
                    },
                },
                "interface": "interface_value",
                "kind": "kind_value",
                "licenses": ["licenses_value_1", "licenses_value_2"],
                "mode": "mode_value",
                "shielded_instance_initial_state": {
                    "dbs": [
                        {"content": "content_value", "file_type": "file_type_value"}
                    ],
                    "dbxs": [
                        {"content": "content_value", "file_type": "file_type_value"}
                    ],
                    "keks": [
                        {"content": "content_value", "file_type": "file_type_value"}
                    ],
                    "pk": {"content": "content_value", "file_type": "file_type_value"},
                },
                "source": "source_value",
                "type_": "type__value",
            }
        ],
        "display_device": {"enable_display": True},
        "fingerprint": "fingerprint_value",
        "guest_accelerators": [
            {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"}
        ],
        "hostname": "hostname_value",
        "id": 205,
        "kind": "kind_value",
        "label_fingerprint": "label_fingerprint_value",
        "labels": {},
        "last_start_timestamp": "last_start_timestamp_value",
        "last_stop_timestamp": "last_stop_timestamp_value",
        "last_suspended_timestamp": "last_suspended_timestamp_value",
        "machine_type": "machine_type_value",
        "metadata": {
            "fingerprint": "fingerprint_value",
            "items": [{"key": "key_value", "value": "value_value"}],
            "kind": "kind_value",
        },
        "min_cpu_platform": "min_cpu_platform_value",
        "name": "name_value",
        "network_interfaces": [
            {
                "access_configs": [
                    {
                        "external_ipv6": "external_ipv6_value",
                        "external_ipv6_prefix_length": 2837,
                        "kind": "kind_value",
                        "name": "name_value",
                        "nat_i_p": "nat_i_p_value",
                        "network_tier": "network_tier_value",
                        "public_ptr_domain_name": "public_ptr_domain_name_value",
                        "set_public_ptr": True,
                        "type_": "type__value",
                    }
                ],
                "alias_ip_ranges": [
                    {
                        "ip_cidr_range": "ip_cidr_range_value",
                        "subnetwork_range_name": "subnetwork_range_name_value",
                    }
                ],
                "fingerprint": "fingerprint_value",
                "ipv6_access_configs": [
                    {
                        "external_ipv6": "external_ipv6_value",
                        "external_ipv6_prefix_length": 2837,
                        "kind": "kind_value",
                        "name": "name_value",
                        "nat_i_p": "nat_i_p_value",
                        "network_tier": "network_tier_value",
                        "public_ptr_domain_name": "public_ptr_domain_name_value",
                        "set_public_ptr": True,
                        "type_": "type__value",
                    }
                ],
                "ipv6_access_type": "ipv6_access_type_value",
                "ipv6_address": "ipv6_address_value",
                "kind": "kind_value",
                "name": "name_value",
                "network": "network_value",
                "network_i_p": "network_i_p_value",
                "nic_type": "nic_type_value",
                "queue_count": 1197,
                "stack_type": "stack_type_value",
                "subnetwork": "subnetwork_value",
            }
        ],
        "private_ipv6_google_access": "private_ipv6_google_access_value",
        "reservation_affinity": {
            "consume_reservation_type": "consume_reservation_type_value",
            "key": "key_value",
            "values": ["values_value_1", "values_value_2"],
        },
        "resource_policies": ["resource_policies_value_1", "resource_policies_value_2"],
        "satisfies_pzs": True,
        "scheduling": {
            "automatic_restart": True,
            "location_hint": "location_hint_value",
            "min_node_cpus": 1379,
            "node_affinities": [
                {
                    "key": "key_value",
                    "operator": "operator_value",
                    "values": ["values_value_1", "values_value_2"],
                }
            ],
            "on_host_maintenance": "on_host_maintenance_value",
            "preemptible": True,
        },
        "self_link": "self_link_value",
        "service_accounts": [
            {"email": "email_value", "scopes": ["scopes_value_1", "scopes_value_2"]}
        ],
        "shielded_instance_config": {
            "enable_integrity_monitoring": True,
            "enable_secure_boot": True,
            "enable_vtpm": True,
        },
        "shielded_instance_integrity_policy": {"update_auto_learn_policy": True},
        "start_restricted": True,
        "status": "status_value",
        "status_message": "status_message_value",
        "tags": {
            "fingerprint": "fingerprint_value",
            "items": ["items_value_1", "items_value_2"],
        },
        "zone": "zone_value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.update_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_update_unary_rest_required_fields(request_type=compute.UpdateInstanceRequest):
    """Required fields for update survive the transcode/query-param pipeline.

    Walks the same default-dropping / unset-field computation the transport
    performs, then issues a fully mocked call and checks the query params.
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).update._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).update._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(
        ("minimal_action", "request_id", "most_disruptive_allowed_action",)
    )
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "put",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.update_unary(request)
            # No required query params with defaults -> nothing expected.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_update_unary_rest_unset_required_fields():
    """Check the unset-required-fields set computed for the update method.

    The result is the intersection of the method's query-param fields and
    its required fields; path/body fields drop out of the intersection.
    """
    transport = transports.InstancesRestTransport(
        # Fix: pass a credentials instance; the original passed the
        # AnonymousCredentials class object instead of calling it.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.update._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("minimalAction", "requestId", "mostDisruptiveAllowedAction",))
        & set(("instance", "instanceResource", "project", "zone",))
    )
def test_update_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.UpdateInstanceRequest
):
    """An HTTP 400 from update_unary surfaces as core_exceptions.BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    # Same fully-populated Instance body as the happy-path test above.
    request_init["instance_resource"] = {
        "advanced_machine_features": {
            "enable_nested_virtualization": True,
            "threads_per_core": 1689,
        },
        "can_ip_forward": True,
        "confidential_instance_config": {"enable_confidential_compute": True},
        "cpu_platform": "cpu_platform_value",
        "creation_timestamp": "creation_timestamp_value",
        "deletion_protection": True,
        "description": "description_value",
        "disks": [
            {
                "auto_delete": True,
                "boot": True,
                "device_name": "device_name_value",
                "disk_encryption_key": {
                    "kms_key_name": "kms_key_name_value",
                    "kms_key_service_account": "kms_key_service_account_value",
                    "raw_key": "raw_key_value",
                    "rsa_encrypted_key": "rsa_encrypted_key_value",
                    "sha256": "sha256_value",
                },
                "disk_size_gb": 1261,
                "guest_os_features": [{"type_": "type__value"}],
                "index": 536,
                "initialize_params": {
                    "description": "description_value",
                    "disk_name": "disk_name_value",
                    "disk_size_gb": 1261,
                    "disk_type": "disk_type_value",
                    "labels": {},
                    "on_update_action": "on_update_action_value",
                    "provisioned_iops": 1740,
                    "resource_policies": [
                        "resource_policies_value_1",
                        "resource_policies_value_2",
                    ],
                    "source_image": "source_image_value",
                    "source_image_encryption_key": {
                        "kms_key_name": "kms_key_name_value",
                        "kms_key_service_account": "kms_key_service_account_value",
                        "raw_key": "raw_key_value",
                        "rsa_encrypted_key": "rsa_encrypted_key_value",
                        "sha256": "sha256_value",
                    },
                    "source_snapshot": "source_snapshot_value",
                    "source_snapshot_encryption_key": {
                        "kms_key_name": "kms_key_name_value",
                        "kms_key_service_account": "kms_key_service_account_value",
                        "raw_key": "raw_key_value",
                        "rsa_encrypted_key": "rsa_encrypted_key_value",
                        "sha256": "sha256_value",
                    },
                },
                "interface": "interface_value",
                "kind": "kind_value",
                "licenses": ["licenses_value_1", "licenses_value_2"],
                "mode": "mode_value",
                "shielded_instance_initial_state": {
                    "dbs": [
                        {"content": "content_value", "file_type": "file_type_value"}
                    ],
                    "dbxs": [
                        {"content": "content_value", "file_type": "file_type_value"}
                    ],
                    "keks": [
                        {"content": "content_value", "file_type": "file_type_value"}
                    ],
                    "pk": {"content": "content_value", "file_type": "file_type_value"},
                },
                "source": "source_value",
                "type_": "type__value",
            }
        ],
        "display_device": {"enable_display": True},
        "fingerprint": "fingerprint_value",
        "guest_accelerators": [
            {"accelerator_count": 1805, "accelerator_type": "accelerator_type_value"}
        ],
        "hostname": "hostname_value",
        "id": 205,
        "kind": "kind_value",
        "label_fingerprint": "label_fingerprint_value",
        "labels": {},
        "last_start_timestamp": "last_start_timestamp_value",
        "last_stop_timestamp": "last_stop_timestamp_value",
        "last_suspended_timestamp": "last_suspended_timestamp_value",
        "machine_type": "machine_type_value",
        "metadata": {
            "fingerprint": "fingerprint_value",
            "items": [{"key": "key_value", "value": "value_value"}],
            "kind": "kind_value",
        },
        "min_cpu_platform": "min_cpu_platform_value",
        "name": "name_value",
        "network_interfaces": [
            {
                "access_configs": [
                    {
                        "external_ipv6": "external_ipv6_value",
                        "external_ipv6_prefix_length": 2837,
                        "kind": "kind_value",
                        "name": "name_value",
                        "nat_i_p": "nat_i_p_value",
                        "network_tier": "network_tier_value",
                        "public_ptr_domain_name": "public_ptr_domain_name_value",
                        "set_public_ptr": True,
                        "type_": "type__value",
                    }
                ],
                "alias_ip_ranges": [
                    {
                        "ip_cidr_range": "ip_cidr_range_value",
                        "subnetwork_range_name": "subnetwork_range_name_value",
                    }
                ],
                "fingerprint": "fingerprint_value",
                "ipv6_access_configs": [
                    {
                        "external_ipv6": "external_ipv6_value",
                        "external_ipv6_prefix_length": 2837,
                        "kind": "kind_value",
                        "name": "name_value",
                        "nat_i_p": "nat_i_p_value",
                        "network_tier": "network_tier_value",
                        "public_ptr_domain_name": "public_ptr_domain_name_value",
                        "set_public_ptr": True,
                        "type_": "type__value",
                    }
                ],
                "ipv6_access_type": "ipv6_access_type_value",
                "ipv6_address": "ipv6_address_value",
                "kind": "kind_value",
                "name": "name_value",
                "network": "network_value",
                "network_i_p": "network_i_p_value",
                "nic_type": "nic_type_value",
                "queue_count": 1197,
                "stack_type": "stack_type_value",
                "subnetwork": "subnetwork_value",
            }
        ],
        "private_ipv6_google_access": "private_ipv6_google_access_value",
        "reservation_affinity": {
            "consume_reservation_type": "consume_reservation_type_value",
            "key": "key_value",
            "values": ["values_value_1", "values_value_2"],
        },
        "resource_policies": ["resource_policies_value_1", "resource_policies_value_2"],
        "satisfies_pzs": True,
        "scheduling": {
            "automatic_restart": True,
            "location_hint": "location_hint_value",
            "min_node_cpus": 1379,
            "node_affinities": [
                {
                    "key": "key_value",
                    "operator": "operator_value",
                    "values": ["values_value_1", "values_value_2"],
                }
            ],
            "on_host_maintenance": "on_host_maintenance_value",
            "preemptible": True,
        },
        "self_link": "self_link_value",
        "service_accounts": [
            {"email": "email_value", "scopes": ["scopes_value_1", "scopes_value_2"]}
        ],
        "shielded_instance_config": {
            "enable_integrity_monitoring": True,
            "enable_secure_boot": True,
            "enable_vtpm": True,
        },
        "shielded_instance_integrity_policy": {"update_auto_learn_policy": True},
        "start_restricted": True,
        "status": "status_value",
        "status_message": "status_message_value",
        "tags": {
            "fingerprint": "fingerprint_value",
            "items": ["items_value_1", "items_value_2"],
        },
        "zone": "zone_value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.update_unary(request)
def test_update_unary_rest_flattened():
    """Flattened update arguments land on the expected instance URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Intercept the session-level HTTP request and hand back a canned 200.
    with mock.patch.object(type(client.transport._session), "request") as req:
        canned = compute.Operation()
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Operation.to_json(canned).encode("UTF-8")
        req.return_value = http_response
        # Path parameters that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # One truthy value per flattened field.
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            instance_resource=compute.Instance(
                advanced_machine_features=compute.AdvancedMachineFeatures(
                    enable_nested_virtualization=True
                )
            ),
        )
        mock_args.update(sample_request)
        client.update_unary(**mock_args)
        # Exactly one HTTP call was made, aimed at the instance resource.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}"
            % client.transport._host,
            args[1],
        )
def test_update_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields raises ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened_fields = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        instance_resource=compute.Instance(
            advanced_machine_features=compute.AdvancedMachineFeatures(
                enable_nested_virtualization=True
            )
        ),
    )
    # Supplying both forms at once is an API-usage error.
    with pytest.raises(ValueError):
        client.update_unary(compute.UpdateInstanceRequest(), **flattened_fields)
def test_update_unary_rest_error():
    """Constructing a REST client for update_unary does not raise."""
    InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.UpdateAccessConfigInstanceRequest, dict,]
)
def test_update_access_config_unary_rest(request_type):
    """update_access_config_unary parses the REST payload into compute.Operation.

    Parametrized over both the typed request class and a plain dict to
    confirm the client accepts either form.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["access_config_resource"] = {
        "external_ipv6": "external_ipv6_value",
        "external_ipv6_prefix_length": 2837,
        "kind": "kind_value",
        "name": "name_value",
        "nat_i_p": "nat_i_p_value",
        "network_tier": "network_tier_value",
        "public_ptr_domain_name": "public_ptr_domain_name_value",
        "set_public_ptr": True,
        "type_": "type__value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.update_access_config_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_update_access_config_unary_rest_required_fields(
    request_type=compute.UpdateAccessConfigInstanceRequest,
):
    """Required fields for updateAccessConfig, including the networkInterface
    query parameter, survive default-dropping and reach the request params.
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["network_interface"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    assert "networkInterface" not in jsonified_request
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).update_access_config._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    assert "networkInterface" in jsonified_request
    assert jsonified_request["networkInterface"] == request_init["network_interface"]
    jsonified_request["instance"] = "instance_value"
    jsonified_request["networkInterface"] = "network_interface_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).update_access_config._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", "network_interface",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "networkInterface" in jsonified_request
    assert jsonified_request["networkInterface"] == "network_interface_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "post",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.update_access_config_unary(request)
            # networkInterface is a required query param, so it must appear
            # (with its default empty value) in the outgoing params.
            expected_params = [
                ("networkInterface", "",),
            ]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_update_access_config_unary_rest_unset_required_fields():
    """Check the unset-required-fields set for updateAccessConfig.

    The result is the intersection of the method's query-param fields and
    its required fields; only "networkInterface" appears in both sets.
    """
    transport = transports.InstancesRestTransport(
        # Fix: pass a credentials instance; the original passed the
        # AnonymousCredentials class object instead of calling it.
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.update_access_config._get_unset_required_fields({})
    assert set(unset_fields) == (
        set(("requestId", "networkInterface",))
        & set(
            ("accessConfigResource", "instance", "networkInterface", "project", "zone",)
        )
    )
def test_update_access_config_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.UpdateAccessConfigInstanceRequest
):
    """An HTTP 400 from update_access_config_unary surfaces as BadRequest."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build a request that the URL transcoder will accept.
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["access_config_resource"] = {
        "external_ipv6": "external_ipv6_value",
        "external_ipv6_prefix_length": 2837,
        "kind": "kind_value",
        "name": "name_value",
        "nat_i_p": "nat_i_p_value",
        "network_tier": "network_tier_value",
        "public_ptr_domain_name": "public_ptr_domain_name_value",
        "set_public_ptr": True,
        "type_": "type__value",
    }
    request = request_type(request_init)
    # Stub the session-level HTTP call to return a 400 and expect the
    # client to translate it into core_exceptions.BadRequest.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        bad_response = Response()
        bad_response.status_code = 400
        bad_response.request = Request()
        req.return_value = bad_response
        client.update_access_config_unary(request)
def test_update_access_config_unary_rest_flattened():
    """Flattened arguments are routed onto the updateAccessConfig URL."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Intercept the session-level HTTP request and hand back a canned 200.
    with mock.patch.object(type(client.transport._session), "request") as req:
        canned = compute.Operation()
        http_response = Response()
        http_response.status_code = 200
        http_response._content = compute.Operation.to_json(canned).encode("UTF-8")
        req.return_value = http_response
        # Path parameters that satisfy the http rule for this method.
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # One truthy value per flattened field.
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            network_interface="network_interface_value",
            access_config_resource=compute.AccessConfig(
                external_ipv6="external_ipv6_value"
            ),
        )
        mock_args.update(sample_request)
        client.update_access_config_unary(**mock_args)
        # Exactly one HTTP call was made, aimed at updateAccessConfig.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateAccessConfig"
            % client.transport._host,
            args[1],
        )
def test_update_access_config_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields raises ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    flattened_fields = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        network_interface="network_interface_value",
        access_config_resource=compute.AccessConfig(
            external_ipv6="external_ipv6_value"
        ),
    )
    # Supplying both forms at once is an API-usage error.
    with pytest.raises(ValueError):
        client.update_access_config_unary(
            compute.UpdateAccessConfigInstanceRequest(), **flattened_fields
        )
def test_update_access_config_unary_rest_error():
    """Constructing a REST client for update_access_config does not raise."""
    InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.UpdateDisplayDeviceInstanceRequest, dict,]
)
def test_update_display_device_unary_rest(request_type):
    """Happy-path REST test for updateDisplayDevice.

    Fakes a 200 HTTP response carrying a serialized ``compute.Operation`` and
    verifies the client deserializes every field of it.  Parametrized to run
    once with the typed request class and once with a plain ``dict``.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["display_device_resource"] = {"enable_display": True}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        # Every field is populated so the per-field assertions below can
        # confirm a full round-trip through JSON (de)serialization.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.update_display_device_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_update_display_device_unary_rest_required_fields(
    request_type=compute.UpdateDisplayDeviceInstanceRequest,
):
    """Verify required-field handling for updateDisplayDevice.

    Checks that default-valued required fields are dropped from the JSON
    request and re-added by ``_get_unset_required_fields``, that explicitly
    set values are left alone, and that the final HTTP call carries the
    expected query parameters (none beyond the path for this method).
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).update_display_device._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).update_display_device._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    # Only request_id may legitimately remain unset for this method.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "patch",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.update_display_device_unary(request)
            # No non-default required query params are expected on the wire.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_update_display_device_unary_rest_unset_required_fields():
    """With an empty request, only requestId may be reported unset.

    Fix: the original passed the ``AnonymousCredentials`` class object
    instead of an instance (missing ``()``), unlike every other test in
    this file; instantiate it for consistency and correctness.
    """
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.update_display_device._get_unset_required_fields({})
    # Unset fields = fields with defaults (requestId) intersected with the
    # method's required fields.
    assert set(unset_fields) == (
        set(("requestId",))
        & set(("displayDeviceResource", "instance", "project", "zone",))
    )
def test_update_display_device_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.UpdateDisplayDeviceInstanceRequest
):
    """A 400 HTTP response must surface as ``core_exceptions.BadRequest``."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["display_device_resource"] = {"enable_display": True}
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.update_display_device_unary(request)
def test_update_display_device_unary_rest_flattened():
    """Flattened-field call must hit the updateDisplayDevice URL.

    Calls the method with flattened keyword arguments (no request object)
    and validates the URL of the single underlying HTTP request against the
    method's path template.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            display_device_resource=compute.DisplayDevice(enable_display=True),
        )
        mock_args.update(sample_request)
        client.update_display_device_unary(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateDisplayDevice"
            % client.transport._host,
            args[1],
        )
def test_update_display_device_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build the flattened arguments separately so the ambiguous call below
    # reads clearly: a populated request object AND flattened fields.
    flattened = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        display_device_resource=compute.DisplayDevice(enable_display=True),
    )
    with pytest.raises(ValueError):
        client.update_display_device_unary(
            compute.UpdateDisplayDeviceInstanceRequest(), **flattened
        )
def test_update_display_device_unary_rest_error():
    """Smoke test: constructing a REST client for this method must not raise."""
    InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.UpdateNetworkInterfaceInstanceRequest, dict,]
)
def test_update_network_interface_unary_rest(request_type):
    """Happy-path REST test for updateNetworkInterface.

    Sends a fully-populated NetworkInterface body, fakes a 200 response
    carrying a serialized ``compute.Operation``, and asserts every field of
    the deserialized result.  Parametrized over typed request and dict.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    # Fully-populated request body exercising nested messages and lists.
    request_init["network_interface_resource"] = {
        "access_configs": [
            {
                "external_ipv6": "external_ipv6_value",
                "external_ipv6_prefix_length": 2837,
                "kind": "kind_value",
                "name": "name_value",
                "nat_i_p": "nat_i_p_value",
                "network_tier": "network_tier_value",
                "public_ptr_domain_name": "public_ptr_domain_name_value",
                "set_public_ptr": True,
                "type_": "type__value",
            }
        ],
        "alias_ip_ranges": [
            {
                "ip_cidr_range": "ip_cidr_range_value",
                "subnetwork_range_name": "subnetwork_range_name_value",
            }
        ],
        "fingerprint": "fingerprint_value",
        "ipv6_access_configs": [
            {
                "external_ipv6": "external_ipv6_value",
                "external_ipv6_prefix_length": 2837,
                "kind": "kind_value",
                "name": "name_value",
                "nat_i_p": "nat_i_p_value",
                "network_tier": "network_tier_value",
                "public_ptr_domain_name": "public_ptr_domain_name_value",
                "set_public_ptr": True,
                "type_": "type__value",
            }
        ],
        "ipv6_access_type": "ipv6_access_type_value",
        "ipv6_address": "ipv6_address_value",
        "kind": "kind_value",
        "name": "name_value",
        "network": "network_value",
        "network_i_p": "network_i_p_value",
        "nic_type": "nic_type_value",
        "queue_count": 1197,
        "stack_type": "stack_type_value",
        "subnetwork": "subnetwork_value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.update_network_interface_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_update_network_interface_unary_rest_required_fields(
    request_type=compute.UpdateNetworkInterfaceInstanceRequest,
):
    """Verify required-field handling for updateNetworkInterface.

    Unlike updateDisplayDevice, ``networkInterface`` is a required *query*
    parameter here, so it must be dropped when default, re-added by
    ``_get_unset_required_fields``, and appear in the final request params.
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["network_interface"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    assert "networkInterface" not in jsonified_request
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).update_network_interface._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    assert "networkInterface" in jsonified_request
    assert jsonified_request["networkInterface"] == request_init["network_interface"]
    jsonified_request["instance"] = "instance_value"
    jsonified_request["networkInterface"] = "network_interface_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).update_network_interface._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("request_id", "network_interface",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "networkInterface" in jsonified_request
    assert jsonified_request["networkInterface"] == "network_interface_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "patch",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.update_network_interface_unary(request)
            # The required query parameter must be present (default value "").
            expected_params = [
                ("networkInterface", "",),
            ]
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_update_network_interface_unary_rest_unset_required_fields():
    """With an empty request, requestId and networkInterface may be unset.

    Fix: the original passed the ``AnonymousCredentials`` class object
    instead of an instance (missing ``()``), unlike every other test in
    this file; instantiate it for consistency and correctness.
    """
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.update_network_interface._get_unset_required_fields({})
    # Unset fields = fields with defaults intersected with required fields.
    assert set(unset_fields) == (
        set(("requestId", "networkInterface",))
        & set(
            (
                "instance",
                "networkInterface",
                "networkInterfaceResource",
                "project",
                "zone",
            )
        )
    )
def test_update_network_interface_unary_rest_bad_request(
    transport: str = "rest", request_type=compute.UpdateNetworkInterfaceInstanceRequest
):
    """A 400 HTTP response must surface as ``core_exceptions.BadRequest``."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    # Same fully-populated body as the happy-path test; only the faked
    # HTTP status differs.
    request_init["network_interface_resource"] = {
        "access_configs": [
            {
                "external_ipv6": "external_ipv6_value",
                "external_ipv6_prefix_length": 2837,
                "kind": "kind_value",
                "name": "name_value",
                "nat_i_p": "nat_i_p_value",
                "network_tier": "network_tier_value",
                "public_ptr_domain_name": "public_ptr_domain_name_value",
                "set_public_ptr": True,
                "type_": "type__value",
            }
        ],
        "alias_ip_ranges": [
            {
                "ip_cidr_range": "ip_cidr_range_value",
                "subnetwork_range_name": "subnetwork_range_name_value",
            }
        ],
        "fingerprint": "fingerprint_value",
        "ipv6_access_configs": [
            {
                "external_ipv6": "external_ipv6_value",
                "external_ipv6_prefix_length": 2837,
                "kind": "kind_value",
                "name": "name_value",
                "nat_i_p": "nat_i_p_value",
                "network_tier": "network_tier_value",
                "public_ptr_domain_name": "public_ptr_domain_name_value",
                "set_public_ptr": True,
                "type_": "type__value",
            }
        ],
        "ipv6_access_type": "ipv6_access_type_value",
        "ipv6_address": "ipv6_address_value",
        "kind": "kind_value",
        "name": "name_value",
        "network": "network_value",
        "network_i_p": "network_i_p_value",
        "nic_type": "nic_type_value",
        "queue_count": 1197,
        "stack_type": "stack_type_value",
        "subnetwork": "subnetwork_value",
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.update_network_interface_unary(request)
def test_update_network_interface_unary_rest_flattened():
    """Flattened-field call must hit the updateNetworkInterface URL.

    Calls the method with flattened keyword arguments (no request object)
    and validates the URL of the single underlying HTTP request against the
    method's path template.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            network_interface="network_interface_value",
            network_interface_resource=compute.NetworkInterface(
                access_configs=[
                    compute.AccessConfig(external_ipv6="external_ipv6_value")
                ]
            ),
        )
        mock_args.update(sample_request)
        client.update_network_interface_unary(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateNetworkInterface"
            % client.transport._host,
            args[1],
        )
def test_update_network_interface_unary_rest_flattened_error(transport: str = "rest"):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build the flattened arguments separately so the ambiguous call below
    # reads clearly: a populated request object AND flattened fields.
    flattened = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        network_interface="network_interface_value",
        network_interface_resource=compute.NetworkInterface(
            access_configs=[
                compute.AccessConfig(external_ipv6="external_ipv6_value")
            ]
        ),
    )
    with pytest.raises(ValueError):
        client.update_network_interface_unary(
            compute.UpdateNetworkInterfaceInstanceRequest(), **flattened
        )
def test_update_network_interface_unary_rest_error():
    """Smoke test: constructing a REST client for this method must not raise."""
    InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
@pytest.mark.parametrize(
    "request_type", [compute.UpdateShieldedInstanceConfigInstanceRequest, dict,]
)
def test_update_shielded_instance_config_unary_rest(request_type):
    """Happy-path REST test for updateShieldedInstanceConfig.

    Fakes a 200 HTTP response carrying a serialized ``compute.Operation``
    and asserts every field of the deserialized result.  Parametrized over
    the typed request class and a plain ``dict``.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["shielded_instance_config_resource"] = {
        "enable_integrity_monitoring": True,
        "enable_secure_boot": True,
        "enable_vtpm": True,
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation(
            client_operation_id="client_operation_id_value",
            creation_timestamp="creation_timestamp_value",
            description="description_value",
            end_time="end_time_value",
            http_error_message="http_error_message_value",
            http_error_status_code=2374,
            id=205,
            insert_time="insert_time_value",
            kind="kind_value",
            name="name_value",
            operation_group_id="operation_group_id_value",
            operation_type="operation_type_value",
            progress=885,
            region="region_value",
            self_link="self_link_value",
            start_time="start_time_value",
            status=compute.Operation.Status.DONE,
            status_message="status_message_value",
            target_id=947,
            target_link="target_link_value",
            user="user_value",
            zone="zone_value",
        )
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        response = client.update_shielded_instance_config_unary(request)
    # Establish that the response is the type that we expect.
    assert isinstance(response, compute.Operation)
    assert response.client_operation_id == "client_operation_id_value"
    assert response.creation_timestamp == "creation_timestamp_value"
    assert response.description == "description_value"
    assert response.end_time == "end_time_value"
    assert response.http_error_message == "http_error_message_value"
    assert response.http_error_status_code == 2374
    assert response.id == 205
    assert response.insert_time == "insert_time_value"
    assert response.kind == "kind_value"
    assert response.name == "name_value"
    assert response.operation_group_id == "operation_group_id_value"
    assert response.operation_type == "operation_type_value"
    assert response.progress == 885
    assert response.region == "region_value"
    assert response.self_link == "self_link_value"
    assert response.start_time == "start_time_value"
    assert response.status == compute.Operation.Status.DONE
    assert response.status_message == "status_message_value"
    assert response.target_id == 947
    assert response.target_link == "target_link_value"
    assert response.user == "user_value"
    assert response.zone == "zone_value"
def test_update_shielded_instance_config_unary_rest_required_fields(
    request_type=compute.UpdateShieldedInstanceConfigInstanceRequest,
):
    """Verify required-field handling for updateShieldedInstanceConfig.

    Checks that default-valued required fields are dropped from the JSON
    request and re-added by ``_get_unset_required_fields``, that explicitly
    set values are left alone, and that the final HTTP call carries no
    extra query parameters for this method.
    """
    transport_class = transports.InstancesRestTransport
    request_init = {}
    request_init["instance"] = ""
    request_init["project"] = ""
    request_init["zone"] = ""
    request = request_type(request_init)
    jsonified_request = json.loads(
        request_type.to_json(
            request, including_default_value_fields=False, use_integers_for_enums=False
        )
    )
    # verify fields with default values are dropped
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).update_shielded_instance_config._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)
    # verify required fields with default values are now present
    jsonified_request["instance"] = "instance_value"
    jsonified_request["project"] = "project_value"
    jsonified_request["zone"] = "zone_value"
    unset_fields = transport_class(
        credentials=ga_credentials.AnonymousCredentials()
    ).update_shielded_instance_config._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    # Only request_id may legitimately remain unset for this method.
    assert not set(unset_fields) - set(("request_id",))
    jsonified_request.update(unset_fields)
    # verify required fields with non-default values are left alone
    assert "instance" in jsonified_request
    assert jsonified_request["instance"] == "instance_value"
    assert "project" in jsonified_request
    assert jsonified_request["project"] == "project_value"
    assert "zone" in jsonified_request
    assert jsonified_request["zone"] == "zone_value"
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = request_type(request_init)
    # Designate an appropriate value for the returned response.
    return_value = compute.Operation()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, "request") as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, "transcode") as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            transcode_result = {
                "uri": "v1/sample_method",
                "method": "patch",
                "query_params": request_init,
            }
            transcode_result["body"] = {}
            transcode.return_value = transcode_result
            response_value = Response()
            response_value.status_code = 200
            json_return_value = compute.Operation.to_json(return_value)
            response_value._content = json_return_value.encode("UTF-8")
            req.return_value = response_value
            response = client.update_shielded_instance_config_unary(request)
            # No non-default required query params are expected on the wire.
            expected_params = []
            actual_params = req.call_args.kwargs["params"]
            assert expected_params == actual_params
def test_update_shielded_instance_config_unary_rest_unset_required_fields():
    """With an empty request, only requestId may be reported unset.

    Fix: the original passed the ``AnonymousCredentials`` class object
    instead of an instance (missing ``()``), unlike every other test in
    this file; instantiate it for consistency and correctness.
    """
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    unset_fields = transport.update_shielded_instance_config._get_unset_required_fields(
        {}
    )
    # Unset fields = fields with defaults (requestId) intersected with the
    # method's required fields.
    assert set(unset_fields) == (
        set(("requestId",))
        & set(("instance", "project", "shieldedInstanceConfigResource", "zone",))
    )
def test_update_shielded_instance_config_unary_rest_bad_request(
    transport: str = "rest",
    request_type=compute.UpdateShieldedInstanceConfigInstanceRequest,
):
    """A 400 HTTP response must surface as ``core_exceptions.BadRequest``."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # send a request that will satisfy transcoding
    request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
    request_init["shielded_instance_config_resource"] = {
        "enable_integrity_monitoring": True,
        "enable_secure_boot": True,
        "enable_vtpm": True,
    }
    request = request_type(request_init)
    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.update_shielded_instance_config_unary(request)
def test_update_shielded_instance_config_unary_rest_flattened():
    """Flattened-field call must hit the updateShieldedInstanceConfig URL.

    Calls the method with flattened keyword arguments (no request object)
    and validates the URL of the single underlying HTTP request against the
    method's path template.
    """
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = compute.Operation()
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = compute.Operation.to_json(return_value)
        response_value._content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        # get arguments that satisfy an http rule for this method
        sample_request = {
            "project": "sample1",
            "zone": "sample2",
            "instance": "sample3",
        }
        # get truthy value for each flattened field
        mock_args = dict(
            project="project_value",
            zone="zone_value",
            instance="instance_value",
            shielded_instance_config_resource=compute.ShieldedInstanceConfig(
                enable_integrity_monitoring=True
            ),
        )
        mock_args.update(sample_request)
        client.update_shielded_instance_config_unary(**mock_args)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateShieldedInstanceConfig"
            % client.transport._host,
            args[1],
        )
def test_update_shielded_instance_config_unary_rest_flattened_error(
    transport: str = "rest",
):
    """Mixing a request object with flattened fields must raise ValueError."""
    client = InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Build the flattened arguments separately so the ambiguous call below
    # reads clearly: a populated request object AND flattened fields.
    flattened = dict(
        project="project_value",
        zone="zone_value",
        instance="instance_value",
        shielded_instance_config_resource=compute.ShieldedInstanceConfig(
            enable_integrity_monitoring=True
        ),
    )
    with pytest.raises(ValueError):
        client.update_shielded_instance_config_unary(
            compute.UpdateShieldedInstanceConfigInstanceRequest(), **flattened
        )
def test_update_shielded_instance_config_unary_rest_error():
    """Smoke test: constructing a REST client for this method must not raise."""
    InstancesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
def test_credentials_transport_error():
    """Client construction must reject every conflicting credential combo.

    A pre-built transport already carries its own credentials, so pairing it
    with explicit credentials, a credentials file, an API key, or scopes is
    ambiguous and must raise ValueError; likewise an API key together with
    a credentials object.
    """
    # It is an error to provide credentials and a transport instance.
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = InstancesClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
        )
    # It is an error to provide a credentials file and a transport instance.
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = InstancesClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )
    # It is an error to provide an api_key and a transport instance.
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    options = client_options.ClientOptions()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = InstancesClient(client_options=options, transport=transport,)
    # It is an error to provide an api_key and a credential.
    options = mock.Mock()
    options.api_key = "api_key"
    with pytest.raises(ValueError):
        client = InstancesClient(
            client_options=options, credentials=ga_credentials.AnonymousCredentials()
        )
    # It is an error to provide scopes and a transport instance.
    transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = InstancesClient(
            client_options={"scopes": ["1", "2"]}, transport=transport,
        )
def test_transport_instance():
    """A client accepts and exposes a caller-supplied transport instance."""
    custom_transport = transports.InstancesRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = InstancesClient(transport=custom_transport)
    # The client must hold the exact object we passed in, not a copy.
    assert client.transport is custom_transport
@pytest.mark.parametrize("transport_class", [transports.InstancesRestTransport,])
def test_transport_adc(transport_class):
    """Transports fall back to application default credentials (ADC)."""
    with mock.patch.object(google.auth, "default") as adc_mock:
        adc_mock.return_value = (ga_credentials.AnonymousCredentials(), None)
        # Instantiating without explicit credentials must trigger exactly
        # one ADC lookup.
        transport_class()
        adc_mock.assert_called_once()
def test_instances_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.InstancesTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_instances_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.compute_v1.services.instances.transports.InstancesTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.InstancesTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"add_access_config",
"add_resource_policies",
"aggregated_list",
"attach_disk",
"bulk_insert",
"delete",
"delete_access_config",
"detach_disk",
"get",
"get_effective_firewalls",
"get_guest_attributes",
"get_iam_policy",
"get_screenshot",
"get_serial_port_output",
"get_shielded_instance_identity",
"insert",
"list",
"list_referrers",
"remove_resource_policies",
"reset",
"send_diagnostic_interrupt",
"set_deletion_protection",
"set_disk_auto_delete",
"set_iam_policy",
"set_labels",
"set_machine_resources",
"set_machine_type",
"set_metadata",
"set_min_cpu_platform",
"set_scheduling",
"set_service_account",
"set_shielded_instance_integrity_policy",
"set_tags",
"simulate_maintenance_event",
"start",
"start_with_encryption_key",
"stop",
"test_iam_permissions",
"update",
"update_access_config",
"update_display_device",
"update_network_interface",
"update_shielded_instance_config",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
with pytest.raises(NotImplementedError):
transport.close()
def test_instances_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.compute_v1.services.instances.transports.InstancesTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.InstancesTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id="octopus",
)
def test_instances_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
"google.cloud.compute_v1.services.instances.transports.InstancesTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.InstancesTransport()
adc.assert_called_once()
def test_instances_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
InstancesClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id=None,
)
def test_instances_http_transport_client_cert_source_for_mtls():
cred = ga_credentials.AnonymousCredentials()
with mock.patch(
"google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
) as mock_configure_mtls_channel:
transports.InstancesRestTransport(
credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
)
mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
def test_instances_host_no_port():
client = InstancesClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="compute.googleapis.com"
),
)
assert client.transport._host == "compute.googleapis.com:443"
def test_instances_host_with_port():
client = InstancesClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="compute.googleapis.com:8000"
),
)
assert client.transport._host == "compute.googleapis.com:8000"
def test_common_billing_account_path():
billing_account = "squid"
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
actual = InstancesClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "clam",
}
path = InstancesClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = InstancesClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "whelk"
expected = "folders/{folder}".format(folder=folder,)
actual = InstancesClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "octopus",
}
path = InstancesClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = InstancesClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "oyster"
expected = "organizations/{organization}".format(organization=organization,)
actual = InstancesClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "nudibranch",
}
path = InstancesClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = InstancesClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "cuttlefish"
expected = "projects/{project}".format(project=project,)
actual = InstancesClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "mussel",
}
path = InstancesClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = InstancesClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "winkle"
location = "nautilus"
expected = "projects/{project}/locations/{location}".format(
project=project, location=location,
)
actual = InstancesClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "scallop",
"location": "abalone",
}
path = InstancesClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = InstancesClient.parse_common_location_path(path)
assert expected == actual
def test_client_with_default_client_info():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.InstancesTransport, "_prep_wrapped_messages"
) as prep:
client = InstancesClient(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.InstancesTransport, "_prep_wrapped_messages"
) as prep:
transport_class = InstancesClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
def test_transport_close():
transports = {
"rest": "_session",
}
for transport, close_name in transports.items():
client = InstancesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
with mock.patch.object(
type(getattr(client.transport, close_name)), "close"
) as close:
with client:
close.assert_not_called()
close.assert_called_once()
def test_client_ctx():
transports = [
"rest",
]
for transport in transports:
client = InstancesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
# Test client calls underlying transport.
with mock.patch.object(type(client.transport), "close") as close:
close.assert_not_called()
with client:
pass
close.assert_called()
@pytest.mark.parametrize(
"client_class,transport_class",
[(InstancesClient, transports.InstancesRestTransport),],
)
def test_api_key_credentials(client_class, transport_class):
with mock.patch.object(
google.auth._default, "get_api_key_credentials", create=True
) as get_api_key_credentials:
mock_cred = mock.Mock()
get_api_key_credentials.return_value = mock_cred
options = client_options.ClientOptions()
options.api_key = "api_key"
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=mock_cred,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
| 39.880976 | 123 | 0.65822 | 56,735 | 533,089 | 5.874451 | 0.013325 | 0.024984 | 0.02953 | 0.053203 | 0.95449 | 0.945581 | 0.934291 | 0.920897 | 0.908415 | 0.900002 | 0 | 0.007284 | 0.252855 | 533,089 | 13,366 | 124 | 39.883959 | 0.829501 | 0.127834 | 0 | 0.784576 | 0 | 0.003519 | 0.181775 | 0.045352 | 0 | 0 | 0 | 0.000075 | 0.13117 | 1 | 0.033037 | false | 0.000098 | 0.002444 | 0.000195 | 0.035676 | 0.004301 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a5da1e7c8b3ecbc6ce6948b7953cc749218e77ea | 149 | py | Python | openapydantic/__init__.py | richarddevers/openapydantic | b28d215f6edf110bf91d1a58ba9fd3762638e44d | [
"MIT"
] | 3 | 2022-03-23T08:25:52.000Z | 2022-03-25T20:01:13.000Z | openapydantic/__init__.py | richarddevers/openapydantic | b28d215f6edf110bf91d1a58ba9fd3762638e44d | [
"MIT"
] | null | null | null | openapydantic/__init__.py | richarddevers/openapydantic | b28d215f6edf110bf91d1a58ba9fd3762638e44d | [
"MIT"
] | null | null | null | from openapydantic import common # noqa
from openapydantic import resolver # noqa
from openapydantic import versions
load_api = versions.load_api
| 24.833333 | 42 | 0.825503 | 19 | 149 | 6.368421 | 0.473684 | 0.421488 | 0.570248 | 0.446281 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.147651 | 149 | 5 | 43 | 29.8 | 0.952756 | 0.060403 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
5727a22cafd0325fe6b2f623d575a0d664cf5a4a | 1,807 | py | Python | tests/test_password_reset.py | yaal-fr/canaille | 828d190adea7bc6e34d59bac42cbc1283509880b | [
"MIT"
] | 3 | 2020-11-03T14:44:53.000Z | 2021-09-26T16:49:01.000Z | tests/test_password_reset.py | yaal-fr/canaille | 828d190adea7bc6e34d59bac42cbc1283509880b | [
"MIT"
] | null | null | null | tests/test_password_reset.py | yaal-fr/canaille | 828d190adea7bc6e34d59bac42cbc1283509880b | [
"MIT"
] | null | null | null | from canaille.account import profile_hash
def test_password_reset(testclient, slapd_connection, user):
user.attr_type_by_name(conn=slapd_connection)
user.reload(conn=slapd_connection)
with testclient.app.app_context():
hash = profile_hash("user", user.userPassword[0])
res = testclient.get("/reset/user/" + hash, status=200)
res.form["password"] = "foobarbaz"
res.form["confirmation"] = "foobarbaz"
res = res.form.submit(status=302)
res = res.follow(status=200)
with testclient.app.app_context():
assert user.check_password("foobarbaz")
assert "Your password has been updated successfuly" in res.text
user.set_password("correct horse battery staple", conn=slapd_connection)
res = testclient.get("/reset/user/" + hash)
res = res.follow()
res = res.follow()
assert "The password reset link that brought you here was invalid." in res.text
def test_password_reset_bad_link(testclient, slapd_connection, user):
user.attr_type_by_name(conn=slapd_connection)
user.reload(conn=slapd_connection)
res = testclient.get("/reset/user/foobarbaz")
res = res.follow()
res = res.follow()
assert "The password reset link that brought you here was invalid." in res.text
def test_password_reset_bad_password(testclient, slapd_connection, user):
user.attr_type_by_name(conn=slapd_connection)
user.reload(conn=slapd_connection)
with testclient.app.app_context():
hash = profile_hash("user", user.userPassword[0])
res = testclient.get("/reset/user/" + hash, status=200)
res.form["password"] = "foobarbaz"
res.form["confirmation"] = "typo"
res = res.form.submit(status=200)
with testclient.app.app_context():
assert user.check_password("correct horse battery staple")
| 34.09434 | 83 | 0.714997 | 241 | 1,807 | 5.19917 | 0.244813 | 0.119713 | 0.106145 | 0.063847 | 0.861931 | 0.785315 | 0.782123 | 0.782123 | 0.727055 | 0.727055 | 0 | 0.011288 | 0.166574 | 1,807 | 52 | 84 | 34.75 | 0.820717 | 0 | 0 | 0.594595 | 0 | 0 | 0.198672 | 0.011621 | 0 | 0 | 0 | 0 | 0.135135 | 1 | 0.081081 | false | 0.351351 | 0.027027 | 0 | 0.108108 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
5731bd2452f85679f66f56c0ee1708d5dd5ee865 | 316 | py | Python | learnPython/diamond.py | ClariNerd617/personalProjects | 926b02c9998f9d17233f91d38a7250240c1b80dc | [
"CC0-1.0"
] | null | null | null | learnPython/diamond.py | ClariNerd617/personalProjects | 926b02c9998f9d17233f91d38a7250240c1b80dc | [
"CC0-1.0"
] | null | null | null | learnPython/diamond.py | ClariNerd617/personalProjects | 926b02c9998f9d17233f91d38a7250240c1b80dc | [
"CC0-1.0"
] | null | null | null | class A:
def method(self):
print('This method belongs to class A')
pass
class B(A):
def method(self):
print('This method belongs to class B')
pass
class C(A):
def method(self):
print('This method belongs to class C')
pass
class D(B, C):
pass
d = D()
d.method() | 15.8 | 47 | 0.575949 | 50 | 316 | 3.64 | 0.26 | 0.065934 | 0.164835 | 0.230769 | 0.708791 | 0.708791 | 0.708791 | 0.708791 | 0.708791 | 0.708791 | 0 | 0 | 0.303797 | 316 | 20 | 48 | 15.8 | 0.827273 | 0 | 0 | 0.4375 | 0 | 0 | 0.283912 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1875 | false | 0.25 | 0 | 0 | 0.4375 | 0.1875 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
573db631f0015b8a5047277efedea53d0033f0c6 | 92,910 | py | Python | plugins/users.py | gorpoorko/Bot-Tcxs-Heroku | b272b1c491ec2bea14e65f1d4e0c96c23bc2d815 | [
"FTL"
] | 1 | 2020-12-11T10:15:46.000Z | 2020-12-11T10:15:46.000Z | plugins/users.py | gorpoorko/Bot-Tcxs-Heroku | b272b1c491ec2bea14e65f1d4e0c96c23bc2d815 | [
"FTL"
] | null | null | null | plugins/users.py | gorpoorko/Bot-Tcxs-Heroku | b272b1c491ec2bea14e65f1d4e0c96c23bc2d815 | [
"FTL"
] | 1 | 2021-06-22T19:27:38.000Z | 2021-06-22T19:27:38.000Z | # -*- coding: utf-8 -*-
#███╗ ███╗ █████╗ ███╗ ██╗██╗ ██████╗ ██████╗ ███╗ ███╗██╗ ██████╗
#████╗ ████║██╔══██╗████╗ ██║██║██╔════╝██╔═══██╗████╗ ████║██║██╔═══██╗
#██╔████╔██║███████║██╔██╗ ██║██║██║ ██║ ██║██╔████╔██║██║██║ ██║
#██║╚██╔╝██║██╔══██║██║╚██╗██║██║██║ ██║ ██║██║╚██╔╝██║██║██║ ██║
#██║ ╚═╝ ██║██║ ██║██║ ╚████║██║╚██████╗╚██████╔╝██║ ╚═╝ ██║██║╚██████╔╝
#╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝╚═╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═════╝
# [+] @GorpoOrko 2020 - Telegram Bot and Personal Assistant [+]
# | TCXS Project Hacker Team - https://tcxsproject.com.br |
# | Telegram: @GorpoOrko Mail:gorpoorko@protonmail.com |
# [+] Github Gorpo Dev: https://github.com/gorpo [+]
import time
from amanobot.namedtuple import InlineKeyboardMarkup
import keyboard
from datetime import datetime
from config import bot, version, bot_username, git_repo,logs,sudoers
import sqlite3
import os
from plugins.admins import is_admin
async def users(msg):
# variaveis que iniciam a Database para enviar a att paga pelos BOTOES
conexao_sqlite = sqlite3.connect('bot_database.db')
conexao_sqlite.row_factory = sqlite3.Row
cursor_sqlite = conexao_sqlite.cursor()
# database logs ---->
conexao_logs = sqlite3.connect('bot_database_logs.db')
conexao_logs.row_factory = sqlite3.Row
cursor_logs = conexao_logs.cursor()
try:
id_usuario = msg['from']['id']
adm = await is_admin(msg['chat']['id'], msg['from']['id'], id_usuario)
except Exception as e:
pass
if msg.get('text') and msg['chat']['type'] == 'supergroup':
if msg['from']['first_name']:
pass
print('->Usuario:{} ->Envio:{} ->Grupo:{} ->Data/Hora:{} '.format(msg['from']['first_name'],msg['text'],msg['chat']['title'],time.ctime()))
## SISTEMA DE GRAVAÇÃO E ENVIO DE LOGS ---------------------------------------------------------------------------------------------------------------->
if msg['text'].lower() == 'logs':
if adm['user'] == True:
cursor_logs.execute("""SELECT * FROM mensagens ; """)
mensagens_logs = cursor_logs.fetchall()
arquivo_logs = open('arquivos/logs.txt', 'a',encoding='utf-8')
arquivo_logs.write('-------[+] REGISTO DE MENSAGENS CAPTADAS PELO BOT NOS GRUPOS E PRIVADO [+]-------\n\n')
for mensagem in mensagens_logs:
grupo = mensagem['grupo']
tipo_grupo = mensagem['tipo_grupo']
id_grupo = mensagem['id_grupo']
usuario = mensagem['usuario']
id_usuario = mensagem['id_usuario']
linguagem = mensagem['linguagem']
tipo = mensagem['tipo']
data = mensagem['data']
mensagem = mensagem['mensagem']
try:
texto = f"Usuario: {usuario} |Id Usuario:{id_usuario} | Linguagem: {linguagem} | Grupo: {grupo} | Id Grupo: {id_grupo} | Tipo Grupo: {tipo_grupo} Tipo: {tipo} | Data: {data} ----->\nMensagem: {mensagem}\n"
except:
texto = ''
arquivo_logs.write(texto)
arquivo_logs.close()
await bot.sendDocument(msg['chat']['id'], open('arquivos/logs.txt','rb'), reply_to_message_id=msg['message_id'])
await bot.sendMessage(msg['chat']['id'], '`{} Esta aqui o log de conversas que tenho armazenado, espero que não tenha nada neste log que te incrimine!`'.format(msg['from']['first_name']),'markdown', reply_to_message_id=msg['message_id'])
os.remove('arquivos/logs.txt')
else:
await bot.sendMessage(msg['chat']['id'], f"@{msg['from']['username']} `este comando é permitido so para admin's`",'markdown')
#LIMPAR OS LOGS
if msg['text'].lower() == 'limpar logs' or msg['text'].lower() == 'apagar logs' or msg['text'].lower() == 'backup logs':
if adm['user'] == True:
cursor_logs.execute("""SELECT * FROM mensagens; """)
mensagens_logs = cursor_logs.fetchall()
arquivo_logs = open('arquivos/logs.txt', 'a',encoding='utf-8')
arquivo_logs.write('-------[+] REGISTO DE MENSAGENS CAPTADAS PELO BOT NOS GRUPOS E PRIVADO [+]-------\n\n')
for mensagem in mensagens_logs:
grupo = mensagem['grupo']
tipo_grupo = mensagem['tipo_grupo']
id_grupo = mensagem['id_grupo']
usuario = mensagem['usuario']
id_usuario = mensagem['id_usuario']
linguagem = mensagem['linguagem']
tipo = mensagem['tipo']
data = mensagem['data']
mensagem = mensagem['mensagem']
try:
texto = f"Usuario: {usuario} |Id Usuario:{id_usuario} | Linguagem: {linguagem} | Grupo: {grupo} | Id Grupo: {id_grupo} | Tipo Grupo: {tipo_grupo} Tipo: {tipo} | Data: {data} ----->\nMensagem: {mensagem}\n"
except:
texto = ''
arquivo_logs.write(texto)
arquivo_logs.close()
await bot.sendDocument(msg['chat']['id'], open('arquivos/logs.txt', 'rb'),reply_to_message_id=msg['message_id'])
await bot.sendMessage(msg['chat']['id'],'`{} Esta aqui o Backup de logs de conversas que tenho armazenado, caso preciso guarde este arquivo pois irei limpar a Database`'.format(msg['from']['first_name']), 'markdown', reply_to_message_id=msg['message_id'])
os.remove('arquivos/logs.txt')
cursor_logs.execute("""DELETE FROM mensagens""")
conexao_logs.commit()
await bot.sendMessage(msg['chat']['id'], f"🤖 {msg['from']['first_name']} Todas os logs de usuários e grupos foram apagados!")
else:
await bot.sendMessage(msg['chat']['id'], f"@{msg['from']['username']} `este comando é permitido so para admin's`",'markdown')
#SISTEMA DE BOTOES INICIO ---------------------------------------------------------------->
if msg['text'].lower() == 'comando' or msg['text'] == '/help' or msg['text'] == '/comandos' or msg['text'] == 'comandos' or 'help' in msg['text'].lower() or 'ajuda' in msg['text'].lower():
kb = InlineKeyboardMarkup(inline_keyboard=[
[dict(text="📦 TCXS Project Store PKG", callback_data='store_doadores')],
[dict(text='🦸 Usuários', callback_data='comandos_usuarios')] +
[dict(text="🤖 Admin's", callback_data='comandos_admins')],
[dict(text='🧰 Ferramentas', callback_data='ferramentas_gerais')] +
[dict(text='📣 Info | Extras', callback_data='infos_extras')],])
await bot.sendMessage(msg['chat']['id'],f"***{msg['from']['first_name']} Aqui está uma lista com todos meus comandos e informações que você precisa saber.***" ,'markdown', reply_markup=kb)
#return True
#PEGA OS DADOS DO keyboard.py ----------------------:
elif msg.get('data') and msg.get('message'):
if msg['data'] == 'inicio_menu':# precisa de dois menus para voltar para o inicio criando um loop entre os dois----->
kb = InlineKeyboardMarkup(inline_keyboard=[
[dict(text="📦 TCXS Project Store PKG", callback_data='store_doadores')],
[dict(text='🦸 Usuários', callback_data='comandos_usuarios')] +
[dict(text="🤖 Admin's", callback_data='comandos_admins')],
[dict(text='🧰 Ferramentas', callback_data='ferramentas_gerais')] +
[dict(text='📣 Info | Extras', callback_data='infos_extras')], ])
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"***{msg['from']['first_name']} Aqui está uma lista com todos meus comandos e informações que você precisa saber.***", 'markdown',reply_markup=kb)
#TCXS STORE PKG DOADORES | PAYD------------------->
elif msg['data'] == 'store_doadores':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"```------ Leia atentamente como adquirir acesso a Loja para Doadores, caso discorde basta não doar. Caso queira doar agora ou renovar sua entrada no grupo de doadores clique em Doar Agora, você será redirecionado para o Mercado Pago da TCXS Project. Não prestamos reembolsos e após doar basta enviar um comprovante no privado dos administradores.```\n`Pra ver os administradores digite:` /admin",'markdown', reply_markup=keyboard.store_doadores)
#return True
elif msg['data'] == 'como_participar':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"```------ Para participar você precisa fazer uma doação, pagamos mensalmente Dropbox de 5tb para armazenamento dos jogos e o valor é cobrado em dolar, a doação é mensal e doando você não esta comprando um produto, mas sim participando de uma vaquinha, todo dinheiro arrecadado fica retido na conta do Mercado Pago para pagarmos o servidor, resumindo contribuindo você faz parte de uma vaquinha de doadores que mantem o servidor, nós da TCXS Project não temos lucro e nosso trabalho é voluntário, caso queira ajudar em algo e se juntar a equipe é bem vindo. Leia atentamente esta documentação e caso discorde de algo pedimos que não doe, não prestamos reembolsos.```\n`Pra ver os administradores digite:` /admin",'markdown', reply_markup=keyboard.voltar_store_doadores)
await bot.sendMessage(msg['message']['chat']['id'], 'http://tcxsproject.com.br/doadores-tcxs-store-regras/')
elif msg['data'] == 'mercado_pago':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"```------ Vejo que tem interesse em ser doador, usamos o sistema do Mercado Pago somente, favor nao insistir com outras formas.\nO Mercado Pago aceita pagamentos online e com cartão de crédito e boletos, este sistema é o mais seguro para nos da equipe e para vocês doadores, lembre que a doação é mensal e doando você faz parte da vaquina que mantem os servidores de 5tb da Dropbox onde encontram-se nossos jogos. Pedimos que antes de doar leia atentamente as regras como mencionado antes e após fazer sua doação envie o comprovante no privado de um de nossos administradores.```\n`Pra ver os administradores digite:` /admin",'markdown', reply_markup=keyboard.voltar_store_doadores)
await bot.sendMessage(msg['message']['chat']['id'], 'https://www.mercadopago.com.br/checkout/v1/redirect?pref_id=354396246-315fce8c-d8f9-4aa0-8583-95d678936375')
## ATUALIZAÇÃO PARA DOADORES ATRAVÉS DO SISTEMA DE BOTÕES------------------------------------------------------------------------------>>
#LOJA HAN------------>
elif msg['data'].split()[0] == 'download_store_HAN':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"📦 `INSTRUÇÕES:` ```------ Bem vindo a TCXS Project ,agora você faz parte dela, entenda que as doações sao mensais e nossa equipe nao ganha nada por este projeto, todo dinheiro arrecadado neste grupo é para pagar os servidores dos quais dispomos jogos. Logo a PSN STUFF IRÁ ACABAR POIS OS SERVIDORES SERÃO DESLIGADOS e assim nao terá mais os jogos gratuitos por ai, restando apenas este acervo que é mantido por voces doadores! Vamos a Instalação!!! --> Espero que tenha um pendrive em mãos! --> copie os arquivos da VERSÃO de acordo com seu Exploit/Desbloqueio, se voce tem HAN ou CFW use a loja HAN, caso contrário e seja o Exploit HEN em seu console use a loja HEN, é necessaria a instalacao deste arquivo para que a loja apareca em seu console! Ative seu HAN/HEN e instale a loja , recomendamos reiniciar o console após este processo!!```",'markdown', reply_markup=keyboard.voltar_store_doadores)
cursor_sqlite.execute("""SELECT * FROM loja_HAN""")
resultados = cursor_sqlite.fetchall()
if resultados == []:
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"🤖 ***Bot diz:*** `não tenho lojas cadastradas, insira o banco de dados com dados ou cadastre um PKG enviando ela no meu privado com o nome:` ***TCXS_STORE_HAN.pkg***",'markdown', reply_markup=keyboard.voltar_store_doadores)
else:
for resultado in resultados:
id_pkg = resultado['pkg']
nome_pkg = resultado['versao']
data_att = resultado['data']
uploader_id = resultado['uploader']
await bot.sendDocument(msg['message']['chat']['id'], document=id_pkg,caption=f'{nome_pkg} upada em {data_att} por @{uploader_id}')
#LOJA HEN------------>
elif msg['data'].split()[0] == 'download_store_HEN':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"📦 `INSTRUÇÕES:` ```------ Bem vindo a TCXS Project ,agora você faz parte dela, entenda que as doações sao mensais e nossa equipe nao ganha nada por este projeto, todo dinheiro arrecadado neste grupo é para pagar os servidores dos quais dispomos jogos. Logo a PSN STUFF IRÁ ACABAR POIS OS SERVIDORES SERÃO DESLIGADOS e assim nao terá mais os jogos gratuitos por ai, restando apenas este acervo que é mantido por voces doadores! Vamos a Instalação!!! --> Espero que tenha um pendrive em mãos! --> copie os arquivos da VERSÃO de acordo com seu Exploit/Desbloqueio, se voce tem HAN ou CFW use a loja HAN, caso contrário e seja o Exploit HEN em seu console use a loja HEN, é necessaria a instalacao deste arquivo para que a loja apareca em seu console! Ative seu HAN/HEN e instale a loja, recomendamos reiniciar o console após este processo!!```",'markdown', reply_markup=keyboard.voltar_store_doadores)
cursor_sqlite.execute("""SELECT * FROM loja_HEN""")
resultados = cursor_sqlite.fetchall()
if resultados == []:
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"🤖 ***Bot diz:*** `não tenho lojas cadastradas, insira o banco de dados com dados ou cadastre um PKG enviando ela no meu privado com o nome:` ***TCXS_STORE_HEN.pkg***",'markdown', reply_markup=keyboard.voltar_store_doadores)
else:
for resultado in resultados:
id_pkg = resultado['pkg']
nome_pkg = resultado['versao']
data_att = resultado['data']
uploader_id = resultado['uploader']
await bot.sendDocument(msg['message']['chat']['id'], document=id_pkg,caption=f'{nome_pkg} upada em {data_att} por @{uploader_id}')
# FIX CFW XML DOADORES COM DATABASE E BOTOES------------>
elif msg['data'].split()[0] == 'download_fix_cfw_doadores':
cursor_sqlite.execute("""SELECT * FROM fix_cfw_xml""")
resultados = cursor_sqlite.fetchall()
if resultados == []:
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"🤖 ***Bot diz:*** `não tenho o fix cfw xml, insira o banco de dados com dados ou cadastre um PKG enviando ele no meu privado com nome de:` ***category_network_tool2.xml***",'markdown', reply_markup=keyboard.voltar_store_doadores)
else:
for resultado in resultados:
id_pkg = resultado['pkg']
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"🚨 `ATENÇÃO`🚨 ```------ Veja o tutorial INSTALAÇÃO EM CONSOLES CFW no menu abaixo ```",'markdown', reply_markup=keyboard.voltar_store_doadores)
await bot.sendDocument(msg['message']['chat']['id'], document=id_pkg,caption='Fix para usuários CFW')
#OUTROS BOTOES IMPORTANTES DA ATT--------------------->
elif msg['data'].split()[0] == 'tutorial_loja':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"📦 `TUTORIAL:` ```------ Abaixo temos o Tutorial TCXS Store instalar a loja em seu PlayStation3!```",'markdown', reply_markup=keyboard.voltar_store_doadores)
await bot.sendMessage(msg['message']['chat']['id'],'https://youtu.be/RakJkOu7BdE')
#return True
elif msg['data'].split()[0] == 'tutorial_cfw':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"📦 `TUTORIAL:` ```------ Abaixo temos o Tutorial TCXS Store ensinando como usar em consoles CFW PlayStation3!```",'markdown', reply_markup=keyboard.voltar_store_doadores)
await bot.sendMessage(msg['message']['chat']['id'],'https://youtu.be/RakJkOu7BdE')
#return True
elif msg['data'].split()[0] == 'tutorial__uso_web':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"📦 `TUTORIAL:` ```------ Abaixo temos o Tutorial TCXS Store ensinando como fazer os Downloads em Segundo Plano em seu PlayStation3!```",'markdown', reply_markup=keyboard.voltar_store_doadores)
await bot.sendMessage(msg['message']['chat']['id'],'https://youtu.be/RakJkOu7BdE')
#return True
elif msg['data'].split()[0] == 'fone_bluetooth_doadores':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"📦 `TUTORIAL:` ```------ Sabia que você pode usar seu fone bluetooth para jogos em seu PlayStation3?```",'markdown', reply_markup=keyboard.voltar_store_doadores)
await bot.sendMessage(msg['message']['chat']['id'],'https://www.youtube.com/watch?v=_wYG7iMa5uY')
#return True
elif msg['data'].split()[0] == 'proxy_usuarios_doadores':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"📦 `TUTORIAL:` ```------ Siga nosso tutorial de proxy para melhorar sua conexão e evitar banimento do seu PlayStation3!```",'markdown', reply_markup=keyboard.voltar_store_doadores)
await bot.sendMessage(msg['message']['chat']['id'],'https://youtu.be/l4o8ySk1Do4')
#return True
# USER COMMANDS ------------------------------------------------->
# Opens the "user commands" submenu: the inline keyboard buttons are defined in
# keyboard.py and their callback data routes back into this elif chain.
elif msg['data'] == 'comandos_usuarios':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"***Comandos para usuários:***",'markdown', reply_markup=keyboard.comandos_usuarios)
#return True
elif msg['data'] == 'comandos_users':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),
f"""/start -inicia o bot
/regras -leia nossas regras
/admin -admins do grupo
/freepkg -loja gratuita PS3
/fix -fix han
/tutorial -como instalar a loja
/rap -licenças dos jogos
/desbloqueio -desbloquear PS3
/segundoplano -download
/codigoerro -codigos PSN/PS3
/listajogos -download direto
/doadores -instruções
/mercadopago -doar/loja
/tcxs -informações sobre
/tcxspyject -criar lojas
/ps1 -cria xml para loja
/ps2 -cria xml para loja
/psp -cria xml para loja
/ps3 -cria xml para loja
/proxy -velocidade no PS3
/tr -traduz um texto
/yt -pesquisa videos no YouTube
/r -pesquisa um termo no redit
/clima -exibe informacoes de clima
/coub -pesquisa de pequenas anima??es
/dados -jogo de dados
/gif -gif do giphy
/git -usuario do github
/id -id do usuario
/ip -informa dados de um ip
/jsondump -retorna dados formatados
/stickerid -pega id de um sticker
/getsticker -baixa um sticker
/pypi -pesquisa libs python
/rextester -interpretador de varias linguagens de programação
/mark -repete o texto informado usando Markdown
/html -repete o texto informado usando HTML
/request -faz uma requisicao a um site
/rt -repete concordando com o usuario na reposta
/fala -Repete o texto que voce pedir para ele falar
/print -gera um print doque falou
/dogbin - envia seu material em texto para o dogbin
/hastebin - envia seu material em texto para o hastebin
/echo - Repete o texto informado.
/shorten - Encurta uma URL.
/recog - reconhecimento com IA (nem sempre disponivel)
/notepad - cria um site com o texto enviado
/crawler - pega todos links dentro de um site
/corrigir - corrige palavras erradas
/token - Exibe informaces de um token de bot.
""",'markdown', reply_markup=keyboard.voltar_comandos_usuarios)
#return True
# Short list of site-related commands (plain text, no parse mode).
elif msg['data'] == 'sites_users':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),"/torrent -pkg torrent\n/pkg_games -pkg's\n/site -doadores\n/facebook -facebook cadastre-se\n/anime -anime gratis\n/onion -deepweb\n/dev -hacker ", reply_markup=keyboard.voltar_comandos_usuarios)
#return True
# Instructions for the /psp, /ps1, /ps2 and /ps3 XML-builder commands, one
# near-identical section per console generation (the PS3 variant requires
# three download links instead of one).
elif msg['data'] == 'cria_xml_users':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),"""***Temos um programa de computador que cria lojas diretamente no console PlayStation3***
/tcxspyject -criar lojas
***Criar XML PSP Instruções:***
`comando:`/psp -cria xml para loja
```
1 - meu comando sempre começa com /xml
2 - eu não aceito espaços no nome de arquivo, nome de jogo e nem na descrição!
3 - você pode copiar o caractere especial invisivel dentro das aspas abaixo para usar onde precisar de espaço!```
`Copie de dentro das aspas o caractere invisivel:`"⠀"
**VAMOS AO COMANDO EM SI**
`Exemplo com caractere invisivel:`
``` gow god⠀of⠀war descriçao⠀usando⠀caractere⠀invisivel www.linkdropbox.com```
`Exemplo sem caractere visivel:`
``` /psp gow god_of_war descrição_sem_caractere_visivel www.linkdropbox.com```
**Onde cada campo:**
`/psp` ```- chama comando```
`gow` ```- nome do xml```
`god_of_war` ```- nome do jogo, se quiser tirar os _ usar caractere especial no lugar```
`descrição_do_jogo` ```- descrição, se quiser tirar os _ usar caractere especial no lugar```
`www.linkdropbox.com` ```- Link do Dropbox```
***Criar XML PS1 Instruções:***
`comando:`/ps1 -cria xml para loja
```
1 - meu comando sempre começa com /xml
2 - eu não aceito espaços no nome de arquivo, nome de jogo e nem na descrição!
3 - você pode copiar o caractere especial invisivel dentro das aspas abaixo para usar onde precisar de espaço!```
`Copie de dentro das aspas o caractere invisivel:`"⠀"
**VAMOS AO COMANDO EM SI**
`Exemplo com caractere invisivel:`
``` gow god⠀of⠀war descriçao⠀usando⠀caractere⠀invisivel www.linkdropbox.com```
`Exemplo sem caractere visivel:`
``` /ps1 gow god_of_war descrição_sem_caractere_visivel www.linkdropbox.com```
**Onde cada campo:**
`/ps1` ```- chama comando```
`gow` ```- nome do xml```
`god_of_war` ```- nome do jogo, se quiser tirar os _ usar caractere especial no lugar```
`descrição_do_jogo` ```- descrição, se quiser tirar os _ usar caractere especial no lugar```
`www.linkdropbox.com` ```- Link do Dropbox```
***Criar XML PS2 Instruções:***
`comando:`/ps2 -cria xml para loja
```
1 - meu comando sempre começa com /xml
2 - eu não aceito espaços no nome de arquivo, nome de jogo e nem na descrição!
3 - você pode copiar o caractere especial invisivel dentro das aspas abaixo para usar onde precisar de espaço!```
`Copie de dentro das aspas o caractere invisivel:`"⠀"
**VAMOS AO COMANDO EM SI**
`Exemplo com caractere invisivel:`
``` gow god⠀of⠀war descriçao⠀usando⠀caractere⠀invisivel www.linkdropbox.com```
`Exemplo sem caractere visivel:`
``` /ps2 gow god_of_war descrição_sem_caractere_visivel www.linkdropbox.com```
**Onde cada campo:**
`/ps2` ```- chama comando```
`gow` ```- nome do xml```
`god_of_war` ```- nome do jogo, se quiser tirar os _ usar caractere especial no lugar```
`descrição_do_jogo` ```- descrição, se quiser tirar os _ usar caractere especial no lugar```
`www.linkdropbox.com` ```- Link do Dropbox```
***Criar XML PS3 Instruções:***
`comando:`/ps3 -cria xml para loja
```
1 - meu comando sempre começa com /xml
2 - eu não aceito espaços no nome de arquivo, nome de jogo e nem na descrição!
3 - você pode copiar o caractere especial invisivel dentro das aspas abaixo para usar onde precisar de espaço!
4 - meu sistema para por jogos de PS3 aceitam apenas 3 links preciso deles como exemplos.```
`Copie de dentro das aspas o caractere invisivel:`"⠀"
**VAMOS AO COMANDO EM SI**
`Exemplo com caractere invisivel:`
``` gow god⠀of⠀war descriçao⠀usando⠀caractere⠀invisivel www.linkdropbox.com www.linkdropbox.com www.linkdropbox.com```
`Exemplo sem caractere visivel:`
``` /ps3 gow god_of_war descrição_sem_caractere_visivel www.linkdropbox.com www.linkdropbox.com www.linkdropbox.com```
**Onde cada campo:**
`/ps3` ```- chama comando```
`gow` ```- nome do xml```
`god_of_war` ```- nome do jogo, se quiser tirar os _ usar caractere especial no lugar```
`descrição_do_jogo` ```- descrição, se quiser tirar os _ usar caractere especial no lugar```
`www.linkdropbox.com` ```- Link do Dropbox, preciso de 3 links separados por espaço```
/ps3 -cria xml para loja""",'markdown', reply_markup=keyboard.voltar_comandos_usuarios)
#return True
# Direct-download PKG game list menu.
elif msg['data'] == 'lista_jogos_users':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),
f"```------ Tenho esta lista de jogos em PKG para PlayStation3 todos com links diretos, estes jogos são originais, portanto precisam de Licenças ou como conhecemos precisam dos Rap's```\n`Basta clicar no botão que te trarei a lista com link direto para download, pedimos sua contribuição para que este projeto se mantenha vivo, Obrigado a todos da TCXS!` ",
'markdown', reply_markup=keyboard.lista_jogos)
# Donation explanation: how the Mercado Pago "vaquinha" (crowdfund) works and
# how donors get access via the group admins.
elif msg['data'] == 'info_doacao_users':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),
f"""```------ A TCXS Project fornece e desenvolve o aplicativo para PlayStation3 TCXS Store, para poder ter nosso aplicativo em seu console basta fazer uma doação nos botões deste bot ou pelo site, antes de doar leia atentamente a todas as regras, já quero explicar como funciona a doação, todo montante arrecadado fica preso em uma conta do Mercado Pago a qual é usada para pagar o servidor do Dropbox e outros serviços, ao doar você esta participando de uma vaquinha onde a união de todos doadores mantém a vaquinha no mercado pago assim possibilitando pagar os serviços que usamos, nossa loja não é paga e em momento algum você é obrigado a pagar, fornecemos jogos para download direto aqui neste bot bem como temos uma loja free que tem todos jogos das demais lojas free, a loja ficou definida apenas para doadores a pedido deles, pois o download fica muito mais rápido e não temos mais perda de jogos, ressalto que o grupo de doadores esta limitado apenas a 200 pessoas e caso esteja lotado você terá que esperar alguem sair, continuando... Logo após doar você deve ir em nosso grupo de telegram e procurar por @MsT3Dz ou @Odeiobot e mostrar seu comprovante de doação assim você estará dentro do grupo que contém as novidades, jogos e nossa TCXS Store PKG PlayStation3.```
` As doações são feitas pelo mercado pago, onde aceitamos todos os cartões, pagamentos online e boletos.
Não prestamos reembolsos pois se trata de doações e não uma venda direta para uso dos serviços!
O material completo é apenas para doadores. Além do projeto para PlayStation3 a TCXS Project conta com inumeros projetos e Sites para seu entreterimento. Após fazer sua doação basta ir no grupo de TELEGRAM e procurar pelo nosso administrador @MsT3Dz ou @Odeiobot , enviar um print de seu comprovante de pagamento que ele irá fornecer acesso a todo material, exigimos que seja feito o pedido no grupo! Outros administradores não irão te responder no privado, contamos com seu bom senso e cordialidade! NÃO PRESTAMOS REEMBOLSOS!
Queremos deixar a todos cientes que as doações feitas são exclusivas para pagar os servidores da Dropbox e serviços como hospedagem de site, sendo assim nos adm’s declaramos não receber nenhum valor neste projeto sendo assim nosso trabalho voluntário e todo e qualquer que queira entrar na equipe para ajudar a contribuir de forma expontanêa é bem vindo. Nossa equipe desenvolve sem cobrar nada pela sua mão de obra os sites acima citados bem como o desenvolvimento da TCXS Store PKG e a conversão e upload de jogos dentro dos servidores da Dropbox para assim os fornecer em formato NO-HAN para os usuários, fornecemos dentro da Plataforma PlayStation3 jogos de PS2, PS2, PsP, Emuladores das mais diversas plataformas! Álem disto disponibilizamos aos usuários a experiencia de ter sites para download de jogos nas mais variadas paltaformas e em especial jogos de PS3 PKG tudo aberto gratuitamente a comunidade bem como este site e outros sites mencionados aqui e que encontram-se nos menus.`""",
'markdown', reply_markup=keyboard.voltar_comandos_usuarios)
# return True
# Prerequisites for running the TCXS Store PKG (CFW/OFW, HAN/HEN etc.).
elif msg['data'] == 'info_requisitos_users':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),
f"""```------ Para usar a TCXS Store PKG você precisa ter seu console exploitado ou desbloqueado, nossa loja funciona nos consoles CFW, OFW, nas versões HAN e HEN, porém precis atender alguns requisitos para usar a TCXS Store PKG:
- Console Desbloqueado/exploitado.
- Versão exploit Han/Hen.
- Assinaturas previamente inseridas ( Raps’).
- Configurações de internet corretas.
- Espaço para download de jogos em seu hd.
- Conhecer previamente tudo sobre seu sistema de desbloqueio/exploit.
- Saber solucionar seus erros.
- Estar ciente que ao doar para a TCXS Store você não esta fazendo uma compra e sim ajudando a pagar os servidores da Dropbox onde upamos os jogos.CONSIDERE SE PARTICIPANDO DE UMA VAQUINHA COLETIVA ONDE TODOS USUARIOS DA TCXS AJUDAM NESTA VAQUINHA PARA MANTER O SERVIDOR```""",
'markdown', reply_markup=keyboard.voltar_comandos_usuarios)
# return True
# Scope and limits of the support the team provides to donors.
elif msg['data'] == 'info_suporte_users':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),
f"""```------ Prestamos suporte somente para nosso aplicativo e jogos para doadores, estejam cientes que:
Suporte será prestado somente para a TCXS Store dos Doadores.
Suporte será prestado somente para jogos que são convertidos pela equipe para Doadores.
Por se tratar de copias modificadas de jogos nossos jogos constantemente são reupados.
Por se tratar de copias modificadas ao cair dos links, os mesmos após conteúdo upado, são substitúidos na TCXS Store PKG para Doadores.
Tenha ciencia de que links podem vir a cair ( não temos frequencia disto).
Saiba que a administração não presta suporte para seu desbloqueio e exploit, mas aconselhamos levar em um técnico competente caso não saiba realizar as operações básicas e avançadas de seu console.
Caso queira se aventurar em aprender tudo sobre seu desbloqueio ou exploit aconselhamos o fórum da PSX Place que são os desenvolvedores do desbloqueio e exploit, não iremos dar suporte ao material de terceiros ou erros cometidos por usuarios ou consoles vindo de tecnicos que não fizeram um bom exploit ou um bom desbloqueio.```""",
'markdown', reply_markup=keyboard.voltar_comandos_usuarios)
# return True
#ADMIN COMMANDS ------------------------------------------------------------------------------------->
#Handlers for the administrators' menu buttons.
elif msg['data'] == 'comandos_admins':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"```------ Os comandos aqui listados funcionam apenas para administradores de grupos e o menu Desenvolvedor somente quem hospeda pode usar. ```",'markdown', reply_markup=keyboard.comandos_admins)
#return True
# Group-management command reference for admins (plain text, no parse mode).
elif msg['data'] == 'gerenciar_grupos':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),"""/start - inicia o bot
/welcome -boas vindas
/ban -bane usuario
/unban -desbane usuario
/kick -kicka usuario
/mute -muta usuario
/unmute -desmuta usuario
/unwarn -remove advertencias
/warn -adverte usuario
/pin -fixa posts
/unpin -desfixa posts
/title -muda titulo grupo
/defregras -define regras
/regras -ler regras
/config -privado
/admdebug -debug admin
/id -id usuario
/ip -dados ip
/jsondump -retorna dados
/stickerid -id sticker
/getsticker -baixa sticker
/criar_sticker -cria pacote stickers
/kibar -copia sticker para o pacote de stickers
/mark -repete o texto markdown
/html -repete o texto HTML
/request -requisição site
/recog - reconhecimento com IA (nem sempre disponivel)
/notepad - cria um site com o texto enviado
/crawler - pega todos links dentro de um site
/corrigir - corrige palavras erradas
/link - pega link de um arquivo use como resposta""", reply_markup=keyboard.voltar_comandos_admins)
#return True
# How admins register/overwrite/delete custom command replies in the database
# (prefixes: '#' register, '$' re-register, '%' delete).
elif msg['data'] == 'cadastrar_comandos':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),"""
💾***CADASTRO DE COMANDOS E REPOSTAS NA DATABASE***
🤖`Para cadastrar um comando no banco de dados:`
#comando resposta que o usuário vai receber
🤖`Para recadastrar um comando no banco de dados:`
$comando resposta que o usuário vai receber
🤖`Para deletar um comando`
%comando
""",'markdown', reply_markup=keyboard.voltar_comandos_admins)
#return True
# File-naming conventions for uploading store/fix packages in the bot's DM.
elif msg['data'] == 'cadastrar_lojas':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),"""
💾***CADASTRAR ARQUIVOS LOJAS DOADORES/FREE***
```Este bot cadastra as lojas para doadores e free, cadastra também os fix pkg e os fix xml, para atualizar as lojas ou fix pkg e xml basta enviar elas no privado do bot, e ele cadastrará seus arquivos desde que estejam de acordo com as instruções abaixo. Pode ocorrer falhas na hora de cadastrar️, caso não tenha cadastrado envie novamente o arquivo, jamais envie mais de um arquivo por vez.```
🤖***Cadastrar Loja Free:*** `Cadastre a LOJA GRATUITA FREE PKG enviando ela no meu privado com nome terminando com free.pkg, antes disto você pode por qualquer coisa no nome no arquivo como exemplo:` ***TCXS_3.6_free.pkg***
🤖***Cadastrar Loja Doadores:*** `Cadastre a LOJA PARA DOADORES PKG enviando ela no meu privado com nome inicinando com TCXS, após este nome você pode escrever oque quiser no arquivo como exemplo:` ***TCXS_Store3.9.pkg***
🤖***Cadastrar Fix HAN PKG:*** `Cadastre o FIX HAN PKG enviando ela no meu privado exatamente conforme exemplo:` ***FIX_HAN.pkg***
🤖***Cadastrar Fix HEN PKG:*** `Cadastre o FIX HEN PKG enviando ela no meu privado exatamente conforme exemplo:` ***FIX_HEN.pkg***
🤖***Cadastrar Fix CFW XML:*** `Cadastre o FIX CFW XML enviando ela no meu privado exatamente conforme exemplo:` ***category_network_tool2.xml***
🤖***Cadastrar Fix HEN XML:*** `Cadastre o FIX HEN XML enviando ela no meu privado exatamente conforme exemplo:` ***category_network.xml***
""",'markdown', reply_markup=keyboard.voltar_comandos_admins)
# Donor registration/expiry management help: automatic 30-day registration,
# plus the manual 'consulta', 'limpar' and 'restringir' text commands.
elif msg['data'] == 'restringir_doadores':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']), """
💾***RESTRINGIR | LIMPAR | RECADASTRAR DOADORES***
```---- Este bot cadastra os Doadores automáticamente, porém se por ventura ele falhar ou mesmo um administrador quiser Cadastar Manualmente o Doador por qualquer eventualidade, seja para conferir um cadastro automatico feito pelo Bot ou para poder dar mais dias de permanência ao Doador!```
🤖***Cadastro automático:*** `Automaticamente ao entrar em um grupo o doador é cadastrado com o prazo de 30 dias de permanencia.`
🤖***Conferir Doadores Cadastrados:*** `Para conferir os cadastros existentes no sistema basta digitar o comando consulta e o arroba do usuário marcando o mesmo que também poderá conferir seu prazo,lembrando que faltando 7 dias para o prazo de banimento do grupo o usuário será notificado sobre para assim poder ou não realizar uma doação e manter sua permanência, use o comando conforme exemplo:`
consulta @UserGamer
🤖***Descadastrar ou Deletar Doador:*** `Descadastrar ou deletar um Doador é necessário para que possa ser feita a inclusão de mais dias na sua conta, para isto basta usar o comando seguido do arroba do Doador conforme exemplo:`
limpar @Mst3Dz
🤖***Cadastrar Manualmente um Doador:*** `Para cadastrar manualmente o doador é necessário pegar sua ID, para isto basta pegar qualquer mensagem deste doador e responder com o comando /id, após ter a ID do Doador tenha certeza que o mesmo não existe no Banco de Dados, para isto realize uma consulta e caso o Doador esteja cadastrado delete ele conforme instruções para deletar. Caso usuário não conste no Banco de Dados ou já tenha sido deletado execute o comando conforme exemplos:` ***restringir @usuario id_usuario quantidade_dias***
`Exemplo na prática:` restringir @MsT3Dz 628238139 300000
🤖***Depois de Banido oque acontece:*** `Após o doador ser banido os administradores são notificados, o nome deste doador é limpo do banco de dados e da lista de restritos do grupo, caso ele faça uma nova doação basta adiciona-lo no grupo sem a necessidade de qualquer comando.`
""", 'markdown', reply_markup=keyboard.voltar_comandos_admins)
# Q&A system help for admins: questions ending in "??" are recorded.
elif msg['data'] == 'perguntas_admins':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']), """
💾***SISTEMA DE PERGUNTAS E RESPOSTAS PARA ADMINS***
```---- Este bot grava todas perguntas desde que contenham ??, avise seus usuários que quando quiserem cadastrar uma pergunta usem duas interrogações no final da frase e automáticamente sua pergunta será cadstrada e assim que um administrador ver pode responder ou cadastrar ela no robo ensinando a Inteligência Artificial.```
🤖`Cadastrar pergunta exemplo:` Como faço para ser tao esperto como o robo??
🤖`Ver perguntas cadastradas apenas digite:` perguntas
🤖`Limpar perguntas cadastradas ou já respondidas digite:` apagar perguntas
""", 'markdown', reply_markup=keyboard.voltar_comandos_admins)
# Message-frequency setting help (0 = mute, 2..10 = chattiness).
elif msg['data'] == 'admin_frequencia':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),"""
💾***SOBRE A FREQUENCIA DE MENSAGENS***
```---- Este bot envia mensagens baseado em uma frequencia que deve ser setada entre 2 e 10, onde:```
🤖`frequencia 0 = mudo`
🤖`frequencia 2 = fala pouco`
🤖`frequencia 10 = fala muito`
""",'markdown', reply_markup=keyboard.voltar_comandos_admins)
# Forbidden-word management help ('proibir' / 'permitir' / 'proibidas').
elif msg['data'] == 'admin_proibicoes':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),"""
💾***SOBRE PROIBIR E PERMITIR PALAVRAS***
```---- Este bot pode restringir/permitir palavras com os comandos:```
🤖`proibir uma palavra:` proibir corno
🤖`permitir uma palavra:` permtir corno
🤖`ver palavras proibidas:` proibidas
""",'markdown', reply_markup=keyboard.voltar_comandos_admins)
# Local vs. global AI knowledge-base help ('inteligencia local/global', 'fale sobre').
elif msg['data'] == 'admin_inteligencia':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),"""
💾***SOBRE O ENVIO DE MENSAGENS DA IA***
```---- Este bot envia mensagens baseado em dois tipos de inteligência, uma local e outra global, onde a local é tudo que aprendeu naquele grupo e ja a global é oque ele aprendeu por onde passou, veja exemplos:```
🤖`inteligencia local = irá falar somente sobre oque aprendeu neste grupo, comando:`
inteligencia local
🤖`inteligencia global = ira falar sobre tudo que aprendeu em todos os lugares que passou na internet`
inteligencia global
🤖`fale sobre = ele fala sobre determinado assunto, exemplo:`
fale sobre playstation
""",'markdown', reply_markup=keyboard.voltar_comandos_admins)
# Automatic-ban system help (35-day auto-registration, 7-day warning).
elif msg['data'] == 'admin_banimento':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']), """
👮***SOBRE O SISTEMA AUTOMATICO DE BANIMENTO DA IA***
```---- Este bot envia conta com um sistema de banimento automático que pode ser ativado ou desativado no menu de configurações. Quando ativado, cada usuário que entrar terá a permanencia de 35 dias cadastrado de forma automática e caso estes dias não sejam renovados/adicionados o usuário tomará ban, este sistema é util para grupos pagos ou grupos que tem doadores ou mensalidades.```
🤖`Sistema ativado = irá cadastar automaticamente e banir todos usuarios quando vencerem seu prazo, com 7 dias antes do vencimento a IA irá marcar o usuário no grupo avisando o mesmo.`
🤖`Sistema desativado = mesmo com sistema desativado ainda é possivel cadastrar usuários de forma manual e fazer a varredura para o banimento de forma manual, igualmente em um horario pre-determinado pelo desenvolvedor este bot irá aplicar o sistema de ban automatico então não precisa se preocupar em passar a verificação, basta apenas adicionar seus usuários manualmente ou inserir mais dias manualmente para eles conforme instruções no botao de Doadores.`
""", 'markdown', reply_markup=keyboard.voltar_comandos_admins)
# Image-recognition (/recog) help for admins.
elif msg['data'] == 'admin_reconhecimento':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']), """
🕵️️***SOBRE O SISTEMA DE RECONHECIMENTO DA IA***
```---- Este bot envia conta com um sistema de reconhecimento com Deep e Machine Learning utilizando as Lib's Tensorflow, Keras e OpenCV.```
🤖`Para usar este sistema basta responder qualquer imagem com o comando /recog e aguardar a IA enviar sua imagem com os objetos reconhecidos nela. Esta opção nem sempre esta disponível devido uso de GPU de quem hospeda a IA.`
""", 'markdown', reply_markup=keyboard.voltar_comandos_admins)
#DEVELOPER BUTTONS AREA (only the bot host/developer can use these)------------------------------------------------------------------------------->>>>>>>>>
elif msg['data'] == 'area_dev':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"```------ Os comandos e configurações listadas nesta categoria funcionam apenas para quem desenvolve ou hospeda o Bot, nenhum ou muito destes comandos não funcionam com usuários nem administradores, igualmente é importante que todos saibam deles e em baso de algum bug envie:``` /bug seguido doque tem para reportar ao Desenvolvedor.",'markdown', reply_markup=keyboard.desenvolvedor)
elif msg['data'] == 'dev_info':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),
'''• Manicomio Bot
Version: {version}
Nosso site: <a href="https://tcxsproject.com.br">Manicomio TCXS Project</a>
Developers: <a href="https://github.com/gorpo">GorpoOrko</>
Partnerships:
» <a href="https://t.me/tcxsproject2">telegram</>
©2020 - <a href="https://t.me/tcxsproject2">TCXS Project™</>
'''.format(version=version),parse_mode='html',reply_markup=keyboard.desenvolvedor, disable_web_page_preview=True)
elif msg['data'] == 'dev_inteligencia':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),
'''• Inteligencia Artificial | Versão: {version}
O sistema de Inteligência Artificial foi implantado unindo Banco de Dados Sqlite3 e algumas bibliotecas, este bot possui um sistema de envio de mensagens local e global, no qual ele se baseia no que aprendeu com oque os usuários falam ou cadastram manualmente em seu banco de dados, conta tambem com sistema de banimento automático ou com data marcada inseridos manualmente via comandos explicados claramente nos menus do bot. Além de falar, interagir e aprender com usuários, este bot também é capaz de restringir palavras ou frases proibindo elas como explicado nos menus do bot. Todas configurações inseridas no Bot foram feitas em Python com uso de Machine e Deep Learning, graças a este sistema este bot tem um sistema de reconhecimento visual onde quando respondida uma imagem com o comando "recog" ele irá aplicar o reconhecimento. Outra peculiaridade da IA é seu poder de converter voz em texto assim facilitando a comunicação dos grupos e pensando de forma global este bot pode traduzir inumeras linguas com seu comando /tr isto facilitará a comunicação com todo mundo! Falando um pouco sobre seu sistema de frequencia, este bot pode ser mutado, falar pouco ou muito dependendo das suas configurações de "frequencia" como listado nos menus. O Grande poder da IA também conta com um sistema de correção de texto onde quando respondido com comando "corrigir" ele irá buscar erros em uma frase e apontar os erros. Perguntas dos usuários do grupo jamais passarão em branco, pois este bot conta com um sistema de cadastro de perguntas, desde que o usuário faça sua pergunta e no final tenha duas interrogações "??". O Sistema de Cadastramento e Banimento automático pode ser facilmente ativado ou desativado em seu grupo, bem como usuários podem ser cadastrados automáticamente ao entrar no grupo ou de forma manual pelos administradores conforme explicações nos menus. 
Esta IA conta com um sistema global de pesquisas respondendo assim oque você quiser, basta executar o comando "fale sobre" e seu tema e ele irá responder! Caso a IA não responda sua pergunta por não conhecer a reposta ou não se comunique com você com um determinado tema, basta adicionar manualemente os comandos conforme explicado nos menus.
Criação de páginas Web pra rascunhos rápidos não é problema para nossa IA, ela posta seu codigo, texto ou conteudo nas plataformas Dogbin, Hastebin e Telegraph onde este ultimo é um site(página web). Ah quem não acredite mas esta IA é capaz de criar links diretos de arquivos do telegram desde que estes não passem de 20mb. Ele também é capaz de fazer varredura em links com Crawling (hacking), informar dados de IP dentre outras ferramentas hackers. Encurtar URL's com nossa IA não é problema e ainda ela te ajuda a saber o clima de sua cidade e varias informações da internet como pesquisar e baixar no youtube e outras plataformas. Crie seus pacotes de Stickers de forma automatizada com esta IA e tenha total acesso aos Avatar's de seus usuários com o comando /avatar . Todas ferramentas possíveis para Gerenciamento de Grupos podem ser encontradas em nossa IA, confira os menus. Quer restringir arquivos somente a pessoas que colaboram finaneceiramente com seu projeto, esta IA também faz isto separando seus usuários e limitando comandos. Nossa IA conta com os mais diversos Interpretadores de Linguagem de Programaçao Novas informações sempre serão passadas através dos menus, em caso de problemas ou bug's reporte ao Desenvolvedor com o comando /bug seguido de seu texto.
@GorpoOrko | Python Developer ©2020 - <a href="https://t.me/tcxsproject2">TCXS Project™</>'''.format(version=version), parse_mode='html', reply_markup=keyboard.desenvolvedor, disable_web_page_preview=True)
# Developer-only '!' command reference (only the bot host can run these).
elif msg['data'] == 'comandos_dev':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),"""
[*] COMANDOS APENAS PARA DESENVOLVEDOR [*]
Os comandos abaixo funcionam apenas para quem hospeda o bot, somente o desenvolvedor tem acesso a estes comandos!
!apagar mensagens - apaga tudo IA e faz backup da Database.
!backup - Faz backup do bot e upload para o Dropbox.
!update - Atualiza o bot de acordo com codigo postado no Github.
!cmd - Executa um comando.
!chat - Obtem infos de um chat.
!del - Deleta a mensagem respondida.
!doc - Envia um documento do server.
!eval - Executa uma função Python.
!exec - Executa um código Python.
!leave - O bot sai do chat.
!plist - Lista os plugins ativos.
!promote - Promove alguém a admin.
!restart - Reinicia o bot.
!upgrade - Atualiza a base do bot.(deprecated)
!upload - Envia um arquivo para o servidor.
!baixar - baixa um documento para o server
!dropbox - faz upload para o Dropbox
!link - gera um link direto do Telegram
| - Define desligamento do bot, EX: 12|30""",'markdown', reply_markup=keyboard.voltar_comandos_admins)
#return True
#GENERAL TOOLS------------------------------------------------------------------------------------------------------------------------------------------------->
#Tool menus (extra/complementary information about the bot and the project):
elif msg['data'] == 'ferramentas_gerais':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"```------ Informações extras ou complementares sobre o Bot ou Projeto TCXS Store PS3 Hacker Team.```",'markdown', reply_markup=keyboard.ferramentas_gerais)
#return True
# --- "ferramenta_comandos": help screen listing the public slash-commands. ---
# NOTE(review): unlike every sibling menu, no parse-mode argument is passed
# here, so this text is delivered without markdown rendering -- confirm intent.
elif msg['data'] == 'ferramenta_comandos':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),"""
/tr -traduz um texto
/yt -pesquisa videos no YouTube
/r -pesquisa um termo no redit
/clima -exibe informacoes de clima
/coub -pesquisa de pequenas animações
/dados -jogo de dados
/gif -gif do giphy
/git -usuario do github
/id -id do usuario
/ip -informa dados de um ip
/jsondump -retorna dados formatados
/stickerid -pega id de um sticker
/getsticker -baixa um sticker
/pypi -pesquisa libs python
/rextester -interpretador de varias linguagens de programação
/mark -repete o texto informado usando Markdown
/html -repete o texto informado usando HTML
/request -faz uma requisicao a um site
/rt -repete concordando com o usuario na reposta
/fala -Repete o texto que voce pedir para ele falar
/print -gera um print doque falou
/dogbin - envia seu material em texto para o dogbin
/hastebin - envia seu material em texto para o hastebin
/echo - Repete o texto informado.
/shorten - Encurta uma URL.
/recog - reconhecimento com IA (nem sempre disponivel)
/notepad - cria um site com o texto enviado
/crawler - pega todos links dentro de um site
/corrigir - corrige palavras erradas
/token - Exibe informaces de um token de outro bot.""", reply_markup=keyboard.voltar_ferramentas_gerais)
#return True
# --- "ferramenta_reconhecimento": help for the /recog image-recognition feature. ---
elif msg['data'] == 'ferramenta_reconhecimento':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']), """
🕵️️***SOBRE O SISTEMA DE RECONHECIMENTO DA IA***
```---- Este bot envia conta com um sistema de reconhecimento com Deep e Machine Learning utilizando as Lib's Tensorflow, Keras e OpenCV.```
🤖`Para usar este sistema basta responder qualquer imagem com o comando /recog e aguardar a IA enviar sua imagem com os objetos reconhecidos nela.`
""", 'markdown', reply_markup=keyboard.voltar_ferramentas_gerais)
# --- "ferramenta_perguntas": help for the admin question/answer recording system. ---
elif msg['data'] == 'ferramenta_perguntas':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']), """
💾***SISTEMA DE PERGUNTAS E RESPOSTAS PARA ADMINS***
```---- Este bot grava todas perguntas desde que contenham ??, avise seus usuários que quando quiserem cadastrar uma pergunta usem duas interrogações no final da frase e automáticamente sua pergunta será cadstrada e assim que um administrador ver pode responder ou cadastrar ela no robo ensinando a Inteligência Artificial.```
🤖`Cadastrar pergunta exemplo:` Como faço para ser tao esperto como o robo??
🤖`Ver perguntas cadastradas apenas digite:` perguntas
🤖`Limpar perguntas cadastradas ou já respondidas digite:` apagar perguntas
""", 'markdown', reply_markup=keyboard.voltar_ferramentas_gerais)
# --- "ferramenta_frequencia": help for the AI message-frequency setting (0/2/10). ---
elif msg['data'] == 'ferramenta_frequencia':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),"""
💾***SOBRE A FREQUENCIA DE MENSAGENS***
```---- Este bot envia mensagens baseado em uma frequencia que deve ser setada entre 2 e 10,este comando pode funcionar somente para administradores dependendo das configurações, seus comandos são:```
🤖`frequencia 0 = mudo`
🤖`frequencia 2 = fala pouco`
🤖`frequencia 10 = fala muito`
""",'markdown', reply_markup=keyboard.voltar_ferramentas_gerais)
elif msg['data'] == 'ferramenta_proibicoes':
try:
cursor_sqlite.execute("""SELECT * FROM proibido; """)
mensagens_proibidas = cursor_sqlite.fetchall()
todas_proibidas = []
separador = ' \n'
for result in mensagens_proibidas:
todas_proibidas.append(result['termo'])
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),"""
💾***SOBRE PROIBIR E PERMITIR PALAVRAS***
```---- Este bot pode restringir/permitir palavras, este comando pode funcionar somente para administradores dependendo das configurações, altere as proibições de palavras ou frases, link etc... com os comandos:```
🤖`proibir uma palavra:` proibir corno
🤖`permitir uma palavra:` permtir corno
🤖`ver palavras proibidas:` proibidas
""",'markdown', reply_markup=keyboard.voltar_ferramentas_gerais)
time.sleep(2)
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),
f'`Palavras Proibidas:`\n ***{separador.join(map(str, todas_proibidas))}***\nPara proibir use `proibir` e para permitir use `permitir`.',
'markdown', reply_markup=keyboard.voltar_ferramentas_gerais)
except:
pass
# --- "ferramenta_inteligencia": help for the local/global AI knowledge modes. ---
elif msg['data'] == 'ferramenta_inteligencia':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),"""
💾***SOBRE O ENVIO DE MENSAGENS DA IA***
```---- Este bot envia mensagens baseado em dois tipos de inteligência, uma local e outra global, onde a local é tudo que aprendeu naquele grupo e ja a global é oque ele aprendeu por onde passou,este comando pode ser restrito a administradores, veja exemplos:```
🤖`inteligencia local = irá falar somente sobre oque aprendeu neste grupo, comando:`
inteligencia local
🤖`inteligencia global = ira falar sobre tudo que aprendeu em todos os lugares que passou na internet`
inteligencia global
🤖`fale sobre = ele fala sobre determinado assunto, exemplo:`
fale sobre playstation
""",'markdown', reply_markup=keyboard.voltar_ferramentas_gerais)
# Shows the manually registered custom commands -- admins only.
# NOTE(review): non-admins get no feedback at all here (no else branch), and the
# blanket `except Exception: pass` also hides real DB errors -- confirm intended.
elif msg['data'] == 'ferramenta_comandos_manuais':
adm = await is_admin(msg['message']['chat']['id'], msg['from']['id'], msg['from']['id'])
if adm['user'] == True:
try:
cursor_sqlite.execute("""SELECT * FROM comandos; """)
resultados = cursor_sqlite.fetchall()
todos_comandos = []
separador = ' \n'
for result in resultados:
todos_comandos.append(result['comando'])
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),
f'`Comandos cadastrados:`\n ***{separador.join(map(str, todos_comandos))}***',
'markdown', reply_markup=keyboard.voltar_ferramentas_gerais)
except Exception as e:
pass
elif msg['data'] == 'ferramenta_lista_jogos':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),
f"```------ Tenho esta lista de jogos em PKG para PlayStation3 todos com links diretos, estes jogos são originais, portanto precisam de Licenças ou como conhecemos precisam dos Rap's```\n`Basta clicar no botão que te trarei a lista com link direto para download, pedimos sua contribuição para que este projeto se mantenha vivo, Obrigado a todos da TCXS!` ",
'markdown', reply_markup=keyboard.lista_jogos)
# --- "ferramenta_cria_xml": usage manual for the /psp, /ps1, /ps2 and /ps3
# store-XML generator commands. The entire body below is ONE triple-quoted
# message (it embeds the invisible braille-blank character U+2800 that users
# copy as a "space"), so no comments can be placed inside it.
elif msg['data'] == 'ferramenta_cria_xml':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']), """***Temos um programa de computador que cria lojas diretamente no console PlayStation3***
/tcxspyject -criar lojas
***Criar XML PSP Instruções:***
`comando:`/psp -cria xml para loja
```
1 - meu comando sempre começa com /xml
2 - eu não aceito espaços no nome de arquivo, nome de jogo e nem na descrição!
3 - você pode copiar o caractere especial invisivel dentro das aspas abaixo para usar onde precisar de espaço!```
`Copie de dentro das aspas o caractere invisivel:`"⠀"
**VAMOS AO COMANDO EM SI**
`Exemplo com caractere invisivel:`
``` gow god⠀of⠀war descriçao⠀usando⠀caractere⠀invisivel www.linkdropbox.com```
`Exemplo sem caractere visivel:`
``` /psp gow god_of_war descrição_sem_caractere_visivel www.linkdropbox.com```
**Onde cada campo:**
`/psp` ```- chama comando```
`gow` ```- nome do xml```
`god_of_war` ```- nome do jogo, se quiser tirar os _ usar caractere especial no lugar```
`descrição_do_jogo` ```- descrição, se quiser tirar os _ usar caractere especial no lugar```
`www.linkdropbox.com` ```- Link do Dropbox```
***Criar XML PS1 Instruções:***
`comando:`/ps1 -cria xml para loja
```
1 - meu comando sempre começa com /xml
2 - eu não aceito espaços no nome de arquivo, nome de jogo e nem na descrição!
3 - você pode copiar o caractere especial invisivel dentro das aspas abaixo para usar onde precisar de espaço!```
`Copie de dentro das aspas o caractere invisivel:`"⠀"
**VAMOS AO COMANDO EM SI**
`Exemplo com caractere invisivel:`
``` gow god⠀of⠀war descriçao⠀usando⠀caractere⠀invisivel www.linkdropbox.com```
`Exemplo sem caractere visivel:`
``` /ps1 gow god_of_war descrição_sem_caractere_visivel www.linkdropbox.com```
**Onde cada campo:**
`/ps1` ```- chama comando```
`gow` ```- nome do xml```
`god_of_war` ```- nome do jogo, se quiser tirar os _ usar caractere especial no lugar```
`descrição_do_jogo` ```- descrição, se quiser tirar os _ usar caractere especial no lugar```
`www.linkdropbox.com` ```- Link do Dropbox```
***Criar XML PS2 Instruções:***
`comando:`/ps2 -cria xml para loja
```
1 - meu comando sempre começa com /xml
2 - eu não aceito espaços no nome de arquivo, nome de jogo e nem na descrição!
3 - você pode copiar o caractere especial invisivel dentro das aspas abaixo para usar onde precisar de espaço!```
`Copie de dentro das aspas o caractere invisivel:`"⠀"
**VAMOS AO COMANDO EM SI**
`Exemplo com caractere invisivel:`
``` gow god⠀of⠀war descriçao⠀usando⠀caractere⠀invisivel www.linkdropbox.com```
`Exemplo sem caractere visivel:`
``` /ps2 gow god_of_war descrição_sem_caractere_visivel www.linkdropbox.com```
**Onde cada campo:**
`/ps2` ```- chama comando```
`gow` ```- nome do xml```
`god_of_war` ```- nome do jogo, se quiser tirar os _ usar caractere especial no lugar```
`descrição_do_jogo` ```- descrição, se quiser tirar os _ usar caractere especial no lugar```
`www.linkdropbox.com` ```- Link do Dropbox```
***Criar XML PS3 Instruções:***
`comando:`/ps3 -cria xml para loja
```
1 - meu comando sempre começa com /xml
2 - eu não aceito espaços no nome de arquivo, nome de jogo e nem na descrição!
3 - você pode copiar o caractere especial invisivel dentro das aspas abaixo para usar onde precisar de espaço!
4 - meu sistema para por jogos de PS3 aceitam apenas 3 links preciso deles como exemplos.```
`Copie de dentro das aspas o caractere invisivel:`"⠀"
**VAMOS AO COMANDO EM SI**
`Exemplo com caractere invisivel:`
``` gow god⠀of⠀war descriçao⠀usando⠀caractere⠀invisivel www.linkdropbox.com www.linkdropbox.com www.linkdropbox.com```
`Exemplo sem caractere visivel:`
``` /ps3 gow god_of_war descrição_sem_caractere_visivel www.linkdropbox.com www.linkdropbox.com www.linkdropbox.com```
**Onde cada campo:**
`/ps3` ```- chama comando```
`gow` ```- nome do xml```
`god_of_war` ```- nome do jogo, se quiser tirar os _ usar caractere especial no lugar```
`descrição_do_jogo` ```- descrição, se quiser tirar os _ usar caractere especial no lugar```
`www.linkdropbox.com` ```- Link do Dropbox, preciso de 3 links separados por espaço```
/ps3 -cria xml para loja""", 'markdown', reply_markup=keyboard.voltar_ferramentas_gerais)
#INFORMAÇÕES E EXTRAS------------------->
elif msg['data'] == 'infos_extras':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"```------ Aconselhamos que leia atentamente as regras, é de suma importancia saber as regras antes de doar para depois não haver reclamações tanto pela parte dos usuários como por parte da administração, somente após ler e concordar com todos os termos abaixo realize sua doação, ja deixamos claro que não prestamos reembolsos.```",'markdown', reply_markup=keyboard.info_extras)
#return True
# --- "info_adquirir": how to obtain the TCXS Store (donation model explainer). ---
elif msg['data'] == 'info_adquirir':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"```------ A TCXS Project fornece e desenvolve o aplicativo para PlayStation3 TCXS Store, para poder ter nosso aplicativo em seu console basta fazer uma doação nos botões deste bot ou pelo site, antes de doar leia atentamente a todas as regras, já quero explicar como funciona a doação, todo montante arrecadado fica preso em uma conta do Mercado Pago a qual é usada para pagar o servidor do Dropbox e outros serviços, ao doar você esta participando de uma vaquinha onde a união de todos doadores mantém a vaquinha no mercado pago assim possibilitando pagar os serviços que usamos, nossa loja não é paga e em momento algum você é obrigado a pagar, fornecemos jogos para download direto aqui neste bot bem como temos uma loja free que tem todos jogos das demais lojas free, a loja ficou definida apenas para doadores a pedido deles, pois o download fica muito mais rápido e não temos mais perda de jogos, ressalto que o grupo de doadores esta limitado apenas a 200 pessoas e caso esteja lotado você terá que esperar alguem sair, continuando... Logo após doar você deve ir em nosso grupo de telegram e procurar por @MsT3Dz ou @Odeiobot e mostrar seu comprovante de doação assim você estará dentro do grupo que contém as novidades, jogos e nossa TCXS Store PKG PlayStation3.```",'markdown', reply_markup=keyboard.voltar_info_extras)
#return True
# --- "info_doacao": donation rules (no refunds, where the money goes). ---
elif msg['data'] == 'info_doacao':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"""```------ As doações são feitas pelo mercado pago, onde aceitamos todos os cartões, pagamentos online e boletos.
Não prestamos reembolsos pois se trata de doações e não uma venda direta para uso dos serviços!
O material completo é apenas para doadores. Além do projeto para PlayStation3 a TCXS Project conta com inumeros projetos e Sites para seu entreterimento. Após fazer sua doação basta ir no grupo de TELEGRAM e procurar pelo nosso administrador @MsT3Dz ou @Odeiobot , enviar um print de seu comprovante de pagamento que ele irá fornecer acesso a todo material, exigimos que seja feito o pedido no grupo! Outros administradores não irão te responder no privado, contamos com seu bom senso e cordialidade! NÃO PRESTAMOS REEMBOLSOS!
Queremos deixar a todos cientes que as doações feitas são exclusivas para pagar os servidores da Dropbox e serviços como hospedagem de site, sendo assim nos adm’s declaramos não receber nenhum valor neste projeto sendo assim nosso trabalho voluntário e todo e qualquer que queira entrar na equipe para ajudar a contribuir de forma expontanêa é bem vindo. Nossa equipe desenvolve sem cobrar nada pela sua mão de obra os sites acima citados bem como o desenvolvimento da TCXS Store PKG e a conversão e upload de jogos dentro dos servidores da Dropbox para assim os fornecer em formato NO-HAN para os usuários, fornecemos dentro da Plataforma PlayStation3 jogos de PS2, PS2, PsP, Emuladores das mais diversas plataformas! Álem disto disponibilizamos aos usuários a experiencia de ter sites para download de jogos nas mais variadas paltaformas e em especial jogos de PS3 PKG tudo aberto gratuitamente a comunidade bem como este site e outros sites mencionados aqui e que encontram-se nos menus.```""",'markdown', reply_markup=keyboard.voltar_info_extras)
#return True
# --- "info_requisitos": console requirements for running the store. ---
elif msg['data'] == 'info_requisitos':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"""```------ Para usar a TCXS Store PKG você precisa ter seu console exploitado ou desbloqueado, nossa loja funciona nos consoles CFW, OFW, nas versões HAN e HEN, porém precis atender alguns requisitos para usar a TCXS Store PKG:
- Console Desbloqueado/exploitado.
- Versão exploit Han/Hen.
- Assinaturas previamente inseridas ( Raps’).
- Configurações de internet corretas.
- Espaço para download de jogos em seu hd.
- Conhecer previamente tudo sobre seu sistema de desbloqueio/exploit.
- Saber solucionar seus erros.
- Estar ciente que ao doar para a TCXS Store você não esta fazendo uma compra e sim ajudando a pagar os servidores da Dropbox onde upamos os jogos.CONSIDERE SE PARTICIPANDO DE UMA VAQUINHA COLETIVA ONDE TODOS USUARIOS DA TCXS AJUDAM NESTA VAQUINHA PARA MANTER O SERVIDOR```""",'markdown', reply_markup=keyboard.voltar_info_extras)
#return True
# --- "info_suporte": support-scope disclaimer. ---
elif msg['data'] == 'info_suporte':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"""```------ Prestamos suporte somente para nosso aplicativo e jogos, estejam cientes que:
Suporte será prestado somente para a TCXS Store.
Suporte será prestado somente para jogos que são convertidos pela equipe.
Por se tratar de copias modificadas de jogos nossos jogos constantemente são reupados.
Por se tratar de copias modificadas ao cair dos links, os mesmos após conteúdo upado, são substitúidos na TCXS Store PKG.
Tenha ciencia de que links podem vir a cair ( não temos frequencia disto).
Saiba que a administração não presta suporte para seu desbloqueio e exploit, mas aconselhamos levar em um técnico competente caso não saiba realizar as operações básicas e avançadas de seu console.
Caso queira se aventurar em aprender tudo sobre seu desbloqueio ou exploit aconselhamos o fórum da PSX Place que são os desenvolvedores do desbloqueio e exploit, não iremos dar suporte ao material de terceiros ou erros cometidos por usuarios ou consoles vindo de tecnicos que não fizeram um bom exploit ou um bom desbloqueio.```""",'markdown', reply_markup=keyboard.voltar_info_extras)
#return True-------------------
#------------------------------------------------------------------------------
#CONFIGURAÇÕES DOS ADMINISTRADORES E DA IA DO BOT
elif msg['data'] == 'btn_banimento':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"***{msg['from']['first_name']}, clique nos botões para ativar ou desativar o Sistema de Banimento Automático.***",'markdown', reply_markup=keyboard.configuracoes_banimento)
elif msg['data'] == 'admin_configuracoes':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"***Configurações da Inteligência Artificial:***\n`Instruções nos menus anteriores.`",'markdown', reply_markup=keyboard.configuracoes)
elif msg['data'] == 'btn_perguntas':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"***{msg['from']['first_name']}, clique nos botões e confira as perguntas cadastradas ou apague as perguntas da Inteligência Artificial.***",'markdown', reply_markup=keyboard.configuracoes_perguntas)
elif msg['data'] == 'btn_frequencia':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"***{msg['from']['first_name']}, clique nos botões e a frequência de fala da Inteligência Artificial.***",'markdown', reply_markup=keyboard.configuracoes_frequencia)
elif msg['data'] == 'btn_inteligencia':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"***{msg['from']['first_name']}, clique nos botões e altere o conhecimento Inteligência Artificial.***",'markdown', reply_markup=keyboard.configuracoes_inteligencia)
elif msg['data'] == 'btn_logs':
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"***{msg['from']['first_name']}, clique nos botões para baixar os logs salvos ou limpar e baixar os logs.***",'markdown', reply_markup=keyboard.configuracoes_logs)
#ENTREGA DE LOGS COM BOTOES - APENAS ADMS
elif msg['data'] == 'btn_verlogs':
adm = await is_admin(msg['message']['chat']['id'], msg['from']['id'], msg['from']['id'])
if adm['user'] == True:
try:
cursor_logs.execute("""SELECT * FROM mensagens ; """)
mensagens_logs = cursor_logs.fetchall()
arquivo_logs = open('arquivos/logs.txt', 'a', encoding='utf-8')
arquivo_logs.write('-------[+] REGISTO DE MENSAGENS CAPTADAS PELO BOT NOS GRUPOS E PRIVADO [+]-------\n\n')
for mensagem in mensagens_logs:
grupo = mensagem['grupo']
tipo_grupo = mensagem['tipo_grupo']
id_grupo = mensagem['id_grupo']
usuario = mensagem['usuario']
id_usuario = mensagem['id_usuario']
linguagem = mensagem['linguagem']
tipo = mensagem['tipo']
data = mensagem['data']
id_mensagem = mensagem['id_mensagem']
mensagem = mensagem['mensagem']
try:
texto = f"Usuario: {usuario} |Id Usuario:{id_usuario} | Linguagem: {linguagem} | Grupo: {grupo} | Id Grupo: {id_grupo} | Tipo Grupo: {tipo_grupo} Tipo: {tipo} | Data: {data} ----->\nMensagem: {mensagem}\n"
except:
texto = ''
arquivo_logs.write(texto)
arquivo_logs.close()
await bot.sendDocument(msg['message']['chat']['id'], open('arquivos/logs.txt', 'rb'),reply_to_message_id=msg['message']['message_id'])
os.remove('arquivos/logs.txt')
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),'`Esta aqui o log de conversas que tenho armazenado:` ','markdown', reply_markup=keyboard.voltar_configuracoes)
except Exception as e:
pass
else:
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),'`Este comando é permitido somente para administradores.` ','markdown', reply_markup=keyboard.voltar_configuracoes)
# Log delivery via buttons - admins only: same export as 'btn_verlogs', but the
# `mensagens` table is wiped afterwards.
# NOTE(review): this branch duplicates the 'btn_verlogs' export logic almost
# line for line; file handles leak on the error path (no `with`), the handle
# given to sendDocument is never closed, and time.sleep(2) blocks the whole
# asyncio event loop -- candidates for a shared helper / asyncio.sleep.
elif msg['data'] == 'btn_limparlogs':
adm = await is_admin(msg['message']['chat']['id'], msg['from']['id'], msg['from']['id'])
if adm['user'] == True:
try:
cursor_logs.execute("""SELECT * FROM mensagens; """)
mensagens_logs = cursor_logs.fetchall()
arquivo_logs = open('arquivos/logs.txt', 'a',encoding='utf-8')
arquivo_logs.write('-------[+] REGISTO DE MENSAGENS CAPTADAS PELO BOT NOS GRUPOS E PRIVADO [+]-------\n\n')
for mensagem in mensagens_logs:
grupo = mensagem['grupo']
tipo_grupo = mensagem['tipo_grupo']
id_grupo = mensagem['id_grupo']
usuario = mensagem['usuario']
id_usuario = mensagem['id_usuario']
linguagem = mensagem['linguagem']
tipo = mensagem['tipo']
data = mensagem['data']
id_mensagem = mensagem['id_mensagem']
mensagem = mensagem['mensagem']
try:
texto = f"Usuario: {usuario} |Id Usuario:{id_usuario} | Linguagem: {linguagem} | Grupo: {grupo} | Id Grupo: {id_grupo} | Tipo Grupo: {tipo_grupo} Tipo: {tipo} | Data: {data} ----->\nMensagem: {mensagem}\n"
except:
texto = ''
arquivo_logs.write(texto)
arquivo_logs.close()
await bot.sendDocument(msg['message']['chat']['id'], open('arquivos/logs.txt', 'rb'),reply_to_message_id=msg['message']['message_id'])
os.remove('arquivos/logs.txt')
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),'`Esta aqui o Backup de logs de conversas que tenho armazenado, caso preciso guarde este arquivo pois irei limpar a Database.` ','markdown', reply_markup=keyboard.voltar_configuracoes)
# Only after the backup is delivered is the table actually cleared.
cursor_logs.execute("""DELETE FROM mensagens""")
conexao_logs.commit()
time.sleep(2)
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),'`Todos os logs dos usuários foram apagados da Database. Mantenha um backup deste arquivo!` ','markdown', reply_markup=keyboard.voltar_configuracoes)
except Exception as e:
pass
else:
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),'`Este comando é permitido somente para administradores.` ','markdown', reply_markup=keyboard.voltar_configuracoes)
# Shows the manually registered custom commands -- admins only (silent for
# non-admins: no else branch).
elif msg['data'] == 'btn_comandos':
adm = await is_admin(msg['message']['chat']['id'], msg['from']['id'], msg['from']['id'])
if adm['user'] == True:
try:
cursor_sqlite.execute("""SELECT * FROM comandos; """)
resultados = cursor_sqlite.fetchall()
todos_comandos = []
separador = ' \n'
for result in resultados:
todos_comandos.append(result['comando'])
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f'`Comandos cadastrados:`\n ***{separador.join(map(str, todos_comandos))}***\n `Veja como cadastrar comandos no menu de administradores` ','markdown', reply_markup=keyboard.voltar_configuracoes)
except Exception as e:
pass
# --- "btn_proibicoes": show the banned-word list to admins (silent for
# non-admins and on any DB error -- NOTE(review): bare `except: pass`). ---
elif msg['data'] == 'btn_proibicoes':
adm = await is_admin(msg['message']['chat']['id'], msg['from']['id'], msg['from']['id'])
if adm['user'] == True:
try:
cursor_sqlite.execute("""SELECT * FROM proibido; """)
mensagens_proibidas = cursor_sqlite.fetchall()
todas_proibidas = []
separador = ' \n'
for result in mensagens_proibidas:
todas_proibidas.append(result['termo'])
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),
f'`Palavras Proibidas:`\n ***{separador.join(map(str, todas_proibidas))}***\nPara proibir use `proibir` e para permitir use `permitir`.',
'markdown', reply_markup=keyboard.voltar_configuracoes)
except:
pass
# botoes que ativam ou desativam o banimento
elif msg['data'] == 'btn_ativaban':
adm = await is_admin(msg['message']['chat']['id'], msg['from']['id'], msg['from']['id'])
if adm['user'] == True:
try:
try:
grupo = f"https://t.me/{msg['message']['chat']['username']}"
except:
grupo = f"Secreto: {msg['message']['chat']['title']}"
pass
try:
admin = msg['message']['from']['username']
except:
admin = f"@{msg['message']['from']['id']}({msg['message']['from']['first_name']})"
pass
data = datetime.now().strftime('%d/%m/%Y %H:%M')
chat_type = msg['message']['chat']['type']
id_grupo = msg['message']['chat']['id']
id_admin = msg['message']['from']['id']
valor = 1
cursor_sqlite.execute(f"""DELETE FROM banimento WHERE id_grupo='{msg['message']['chat']['id']}' """)
cursor_sqlite.execute(f"""INSERT INTO banimento (int_id, grupo, tipo_grupo, id_grupo, admin, id_admin, data, valor)VALUES(null,'{grupo}','{chat_type}','{id_grupo}','{admin}','{id_admin}','{data}','{valor}')""")
conexao_sqlite.commit()
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"🤖 `Sistema de Cadastramento Automático para Banimento:`***ATIVO***\nAgora todos usuários que entrarem no grupo receberão uma data limite de permanencia, caso queira remover restriçao do usuário ou inserir mais dias de permanência consulte /help", 'markdown',reply_markup=keyboard.configuracoes)
except Exception as e:
pass
# botoes que ativam ou desativam o banimento
elif msg['data'] == 'btn_desativaban':
adm = await is_admin(msg['message']['chat']['id'], msg['from']['id'], msg['from']['id'])
if adm['user'] == True:
try:
try:
grupo = f"https://t.me/{msg['message']['chat']['username']}"
except:
grupo = f"Secreto: {msg['message']['chat']['title']}"
pass
try:
admin = msg['message']['from']['username']
except:
admin = f"@{msg['message']['from']['id']}({msg['message']['from']['first_name']})"
pass
data = datetime.now().strftime('%d/%m/%Y %H:%M')
chat_type = msg['message']['chat']['type']
id_grupo = msg['message']['chat']['id']
id_admin = msg['message']['from']['id']
valor = 1
cursor_sqlite.execute(
f"""DELETE FROM banimento WHERE id_grupo='{msg['message']['chat']['id']}' """)
cursor_sqlite.execute(
f"""INSERT INTO banimento (int_id, grupo, tipo_grupo, id_grupo, admin, id_admin, data, valor)VALUES(null,'{grupo}','{chat_type}','{id_grupo}','{admin}','{id_admin}','{data}','{valor}')""")
conexao_sqlite.commit()
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']), f"🤖 `Sistema de Cadastramento Automático para Banimento:`***INATIVO***\nAgora todos usuários que entrarem no grupo não receberão uma data limite de permanncia, caso queira adicionar ou remover restriçao do usuário ou inserir mais dias de permanência de forma manual consulte /help",'markdown', reply_markup=keyboard.configuracoes)
except Exception as e:
pass
# Button: review the questions users registered (messages ending in "??").
# NOTE(review): the loop edits the SAME message once per question with a
# blocking time.sleep(1) between edits -- only the last question remains
# visible and the sleep stalls the asyncio event loop; also, editing with
# identical text makes Telegram raise, which the inner except silently eats.
elif msg['data'] == 'ver_perguntas':
try: # CHECK USERS' REGISTERED QUESTIONS ----------------------------------------------------------->
cursor_sqlite.execute("""SELECT * FROM perguntas""")
resultados = cursor_sqlite.fetchall()
if resultados == []:
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"🤖 {msg['from']['first_name']} `não tenho perguntas cadastradas, tente outra hora ou cadastre algumas perguntas.`",'markdown', reply_markup=keyboard.configuracoes)
else:
for resultado in resultados:
usuario = resultado['usuario']
pergunta = resultado['pergunta']
try:
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"🤖 `Usuário:`{usuario}\n`Pergunta:`{pergunta}",'markdown', reply_markup=keyboard.configuracoes)
time.sleep(1)
except:
pass
except Exception as e:
pass
# Button: wipe all registered questions -- admins only (silent for non-admins).
elif msg['data'] == 'apagar_perguntas' :
adm = await is_admin(msg['message']['chat']['id'], msg['from']['id'], msg['from']['id'])
if adm['user'] == True:
try: # CLEAR USERS' REGISTERED QUESTIONS ------------------------------------------------------------->
cursor_sqlite.execute("""DELETE FROM perguntas""")
conexao_sqlite.commit()
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"🤖 `Todas perguntas foram apagadas!`",'markdown', reply_markup=keyboard.configuracoes)
except Exception as e:
pass
#bota a frequencia da IA em baixa valor 1
elif msg['data'] == 'frequencia_baixa':
adm = await is_admin(msg['message']['chat']['id'], msg['from']['id'], msg['from']['id'])
if adm['user'] == True:
try:
grupo = f"https://t.me/{msg['message']['chat']['username']}"
except:
grupo = f"Secreto: {msg['message']['chat']['title']}"
pass
valor = '1'
cursor_sqlite.execute("""SELECT * FROM frequencia; """)
frequencias = cursor_sqlite.fetchall()
comparar_vazio = []
freq = list(frequencias)
if freq == comparar_vazio:
cursor_sqlite.execute(f"""INSERT INTO frequencia(id_grupo, grupo, valor)VALUES('{msg['message']['chat']['id']}','{grupo}','{valor}')""")
conexao_sqlite.commit()
else:
for frequencia in frequencias: # loop em todos resultados da Database
if frequencia['id_grupo'] == msg['message']['chat']['id']:
cursor_sqlite.execute(f"""DELETE FROM frequencia WHERE id_grupo='{msg['message']['chat']['id']}'""")
conexao_sqlite.commit()
cursor_sqlite.execute(f"""INSERT INTO frequencia(id_grupo, grupo, valor)VALUES('{msg['message']['chat']['id']}','{grupo}','{valor}')""")
conexao_sqlite.commit()
if frequencia['id_grupo'] != msg['message']['chat']['id']:
cursor_sqlite.execute(f"""INSERT INTO frequencia(id_grupo, grupo, valor)VALUES('{msg['message']['chat']['id']}','{grupo}','{valor}')""")
conexao_sqlite.commit()
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"🤖 `Frequencia alterada para {valor}, vou tentar falar pouco.`",'markdown', reply_markup=keyboard.configuracoes)
# a frequencia alta faz com que o a IA fale mais este valor é multiplicado por 2 e entra em um random
elif msg['data'] == 'frequencia_alta':
adm = await is_admin(msg['message']['chat']['id'], msg['from']['id'], msg['from']['id'])
if adm['user'] == True:
try:
grupo = f"https://t.me/{msg['message']['chat']['username']}"
except:
grupo = f"Secreto: {msg['message']['chat']['title']}"
pass
valor = '6'
cursor_sqlite.execute("""SELECT * FROM frequencia; """)
frequencias = cursor_sqlite.fetchall()
comparar_vazio = []
freq = list(frequencias)
if freq == comparar_vazio:
cursor_sqlite.execute(f"""INSERT INTO frequencia(id_grupo, grupo, valor)VALUES('{msg['message']['chat']['id']}','{grupo}','{valor}')""")
conexao_sqlite.commit()
else:
for frequencia in frequencias: # loop em todos resultados da Database
if frequencia['id_grupo'] == msg['message']['chat']['id']:
cursor_sqlite.execute(f"""DELETE FROM frequencia WHERE id_grupo='{msg['message']['chat']['id']}'""")
conexao_sqlite.commit()
cursor_sqlite.execute(f"""INSERT INTO frequencia(id_grupo, grupo, valor)VALUES('{msg['message']['chat']['id']}','{grupo}','{valor}')""")
conexao_sqlite.commit()
if frequencia['id_grupo'] != msg['message']['chat']['id']:
cursor_sqlite.execute(f"""INSERT INTO frequencia(id_grupo, grupo, valor)VALUES('{msg['message']['chat']['id']}','{grupo}','{valor}')""")
conexao_sqlite.commit()
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"🤖 `Frequencia alterada para {valor}, vou tentar falar bastante.`",'markdown', reply_markup=keyboard.configuracoes)
#frequecnia setada com 0 e deixa o bot mudo
elif msg['data'] == 'frequencia_mudo':
adm = await is_admin(msg['message']['chat']['id'], msg['from']['id'], msg['from']['id'])
if adm['user'] == True:
try:
grupo = f"https://t.me/{msg['message']['chat']['username']}"
except:
grupo = f"Secreto: {msg['message']['chat']['title']}"
pass
valor = '0'
cursor_sqlite.execute("""SELECT * FROM frequencia; """)
frequencias = cursor_sqlite.fetchall()
comparar_vazio = []
freq = list(frequencias)
if freq == comparar_vazio:
cursor_sqlite.execute(f"""INSERT INTO frequencia(id_grupo, grupo, valor)VALUES('{msg['message']['chat']['id']}','{grupo}','{valor}')""")
conexao_sqlite.commit()
else:
for frequencia in frequencias: # loop em todos resultados da Database
if frequencia['id_grupo'] == msg['message']['chat']['id']:
cursor_sqlite.execute(f"""DELETE FROM frequencia WHERE id_grupo='{msg['message']['chat']['id']}'""")
conexao_sqlite.commit()
cursor_sqlite.execute(f"""INSERT INTO frequencia(id_grupo, grupo, valor)VALUES('{msg['message']['chat']['id']}','{grupo}','{valor}')""")
conexao_sqlite.commit()
if frequencia['id_grupo'] != msg['message']['chat']['id']:
cursor_sqlite.execute(f"""INSERT INTO frequencia(id_grupo, grupo, valor)VALUES('{msg['message']['chat']['id']}','{grupo}','{valor}')""")
conexao_sqlite.commit()
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"🤖 `Frequencia alterada para {valor}, vou ficar mudo.`",'markdown', reply_markup=keyboard.configuracoes)
#seta a IA local para os usuários receberem apenas mensagens daquele grupo.
elif msg['data'] == 'inteligencia_local':
adm = await is_admin(msg['message']['chat']['id'], msg['from']['id'], msg['from']['id'])
if adm['user'] == True:
try:
try:
grupo = f"https://t.me/{msg['message']['chat']['username']}"
except:
grupo = f"Secreto: {msg['message']['chat']['title']}"
pass
data = datetime.now().strftime('%d/%m/%Y %H:%M')
tipo = 'IA'
linguagem = 'nenhuma'
usuario = 'admin via botoes'
inteligencia = 'local'
chat_type = msg['message']['chat']['type']
id_grupo = msg['message']['chat']['id']
cursor_sqlite.execute(f"""DELETE FROM inteligencia WHERE id_grupo='{msg['message']['chat']['id']}' """)
cursor_sqlite.execute(f"""INSERT INTO inteligencia (int_id, grupo, tipo_grupo, id_grupo, usuario, id_usuario, linguagem, tipo, data,inteligencia)VALUES(null,'{grupo}','{chat_type}','{id_grupo}','{usuario}','{id_usuario}','{linguagem}','{tipo}','{data}','{inteligencia}')""")
conexao_sqlite.commit()
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"`Inteligencia Artificial:`***{inteligencia}***\nAgora vocês irão receber coisas que aprendi nesta categoria.",'markdown', reply_markup=keyboard.configuracoes)
except Exception as e:
pass
#seta a IA Global para os usuarios receberem todos tipos de mensagem
elif msg['data'] == 'inteligencia_global':
adm = await is_admin(msg['message']['chat']['id'], msg['from']['id'], msg['from']['id'])
if adm['user'] == True:
try:
try:
grupo = f"https://t.me/{msg['message']['chat']['username']}"
except:
grupo = f"Secreto: {msg['message']['chat']['title']}"
pass
data = datetime.now().strftime('%d/%m/%Y %H:%M')
tipo = 'IA'
linguagem = 'nenhuma'
usuario = 'admin via botoes'
inteligencia = 'global'
chat_type = msg['message']['chat']['type']
id_grupo = msg['message']['chat']['id']
cursor_sqlite.execute(f"""DELETE FROM inteligencia WHERE id_grupo='{msg['message']['chat']['id']}' """)
cursor_sqlite.execute(f"""INSERT INTO inteligencia (int_id, grupo, tipo_grupo, id_grupo, usuario, id_usuario, linguagem, tipo, data,inteligencia)VALUES(null,'{grupo}','{chat_type}','{id_grupo}','{usuario}','{id_usuario}','{linguagem}','{tipo}','{data}','{inteligencia}')""")
conexao_sqlite.commit()
await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"`Inteligencia Artificial:`***{inteligencia}***\nAgora vocês irão receber coisas que aprendi nesta categoria.",'markdown', reply_markup=keyboard.configuracoes)
except Exception as e:
pass
#MODELO PARA NAO TER Q FICAR LIMPANDO CODIGO PARA CRIAR MAIS MENUS--------------->
#elif msg['data'] == 'infos_extras':
#await bot.editMessageText((msg['message']['chat']['id'], msg['message']['message_id']),f"```------ Informações extras ou complementares sobre o Bot ou Projeto TCXS Store PS3 Hacker Team.```",'markdown', reply_markup=keyboard.infos_extras)
#return True
| 73.97293 | 2,216 | 0.639963 | 11,851 | 92,910 | 4.993503 | 0.111805 | 0.040387 | 0.034776 | 0.034878 | 0.76177 | 0.742438 | 0.737926 | 0.727568 | 0.719862 | 0.715114 | 0 | 0.003315 | 0.227295 | 92,910 | 1,255 | 2,217 | 74.031873 | 0.813754 | 0.043192 | 0 | 0.709839 | 0 | 0.093373 | 0.632469 | 0.047566 | 0 | 0 | 0 | 0.001594 | 0 | 1 | 0 | false | 0.029116 | 0.01004 | 0 | 0.01004 | 0.00502 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
574f6dfff72fc069cc18313624d1235b5ce323fa | 20,277 | py | Python | kepconfig/connectivity/tag.py | PTCInc/Kepware-ConfigAPI-SDK-Python | ff7757492a7ea1e73f3d5862ba956c8af4dcc797 | [
"MIT"
] | 9 | 2020-04-08T18:48:03.000Z | 2022-03-18T15:05:01.000Z | kepconfig/connectivity/tag.py | PTCInc/Kepware-ConfigAPI-SDK-Python | ff7757492a7ea1e73f3d5862ba956c8af4dcc797 | [
"MIT"
] | 5 | 2020-08-24T15:08:54.000Z | 2022-03-16T17:07:09.000Z | kepconfig/connectivity/tag.py | PTCInc/Kepware-ConfigAPI-SDK-Python | ff7757492a7ea1e73f3d5862ba956c8af4dcc797 | [
"MIT"
] | 10 | 2020-03-23T20:40:17.000Z | 2021-11-14T08:42:56.000Z | # -------------------------------------------------------------------------
# Copyright (c) PTC Inc. and/or all its affiliates. All rights reserved.
# See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
r"""`tag` exposes an API to allow modifications (add, delete, modify) to
tag and tag group objects within the Kepware Configuration API
"""
from ..error import KepError, KepHTTPError
from typing import Union
import kepconfig
from . import channel, device
import inspect
TAGS_ROOT = '/tags'
TAG_GRP_ROOT = '/tag_groups'
def _create_tags_url(tag = None):
'''Creates url object for the "tags" branch of Kepware's project tree. Used
to build a part of Kepware Configuration API URL structure
Returns the tag specific url when a value is passed as the tag name.
'''
if tag == None:
return TAGS_ROOT
else:
return '{}/{}'.format(TAGS_ROOT,tag)
def _create_tag_groups_url(tag_group = None):
'''Creates url object for the "tag_group" branch of Kepware's project tree. Used
to build a part of Kepware Configuration API URL structure
Returns the tag group specific url when a value is passed as the tag group name.
'''
if tag_group == None:
return TAG_GRP_ROOT
else:
return '{}/{}'.format(TAG_GRP_ROOT,tag_group)
def add_tag(server, tag_path, DATA) -> Union[bool, list]:
    '''Add "tag" objects at a specific path in Kepware. Can be used to
    pass a list of tags to be added at one path location.

    INPUTS:
    "server" - instance of the "server" class
    "tag_path" - path identifying location to add tags. Standard Kepware address
    decimal notation string such as "channel1.device1.tag_group1"
    "DATA" - properly JSON object (dict) of the tags expected by the
    Kepware Configuration API at the "tags" url

    RETURNS:
    True - If a "HTTP 201 - Created" is received from Kepware
    List - If a "HTTP 207 - Multi-Status" is received from Kepware, a list of
    dict error responses for all tags that failed to be added

    EXCEPTIONS:
    KepHTTPError - If urllib provides an HTTPError
    KepURLError - If urllib provides an URLError
    '''
    path_obj = kepconfig.path_split(tag_path)
    try:
        # Assemble channel/device base, any intermediate tag groups, then the tags leaf.
        segments = [server.url,
                    channel._create_url(path_obj['channel']),
                    device._create_url(path_obj['device'])]
        for tg in path_obj.get('tag_path', []):
            segments.append(_create_tag_groups_url(tag_group=tg))
        segments.append(_create_tags_url())
        url = ''.join(segments)
    except KeyError as err:
        err_msg = 'Error: No key {} identified | Function: {}'.format(err, inspect.currentframe().f_code.co_name)
        raise KepError(err_msg)
    except Exception as e:
        err_msg = 'Error: Error with {}: {}'.format(inspect.currentframe().f_code.co_name, str(e))
        raise KepError(err_msg)
    r = server._config_add(url, DATA)
    if r.code == 201:
        return True
    if r.code == 207:
        # Partial success: report only the items that failed.
        return [item for item in r.payload if item['code'] != 201]
    raise KepHTTPError(r.url, r.code, r.msg, r.hdrs, r.payload)
def add_tag_group(server, tag_group_path, DATA) -> Union[bool, list]:
    '''Add "tag_group" objects at a specific path in Kepware. Can be used to
    pass a list of tag groups and children (tags or tag groups) to be added
    at one path location.

    INPUTS:
    "server" - instance of the "server" class
    "tag_group_path" - path identifying location to add tag groups. Standard
    Kepware address decimal notation string such as "channel1.device1.tag_group1"
    "DATA" - properly JSON object (dict) of the tag groups and their children
    expected by the Kepware Configuration API at the "tag_groups" url

    RETURNS:
    True - If a "HTTP 201 - Created" is received from Kepware
    List - If a "HTTP 207 - Multi-Status" is received from Kepware, a list of
    dict error responses for all tag groups that failed to be added

    EXCEPTIONS:
    KepHTTPError - If urllib provides an HTTPError
    KepURLError - If urllib provides an URLError
    '''
    path_obj = kepconfig.path_split(tag_group_path)
    try:
        # Assemble channel/device base, any intermediate tag groups, then the tag_groups leaf.
        segments = [server.url,
                    channel._create_url(path_obj['channel']),
                    device._create_url(path_obj['device'])]
        for tg in path_obj.get('tag_path', []):
            segments.append(_create_tag_groups_url(tag_group=tg))
        segments.append(_create_tag_groups_url())
        url = ''.join(segments)
    except KeyError as err:
        err_msg = 'Error: No key {} identified | Function: {}'.format(err, inspect.currentframe().f_code.co_name)
        raise KepError(err_msg)
    except Exception as e:
        err_msg = 'Error: Error with {}: {}'.format(inspect.currentframe().f_code.co_name, str(e))
        raise KepError(err_msg)
    r = server._config_add(url, DATA)
    if r.code == 201:
        return True
    if r.code == 207:
        # Partial success: report only the items that failed.
        return [item for item in r.payload if item['code'] != 201]
    raise KepHTTPError(r.url, r.code, r.msg, r.hdrs, r.payload)
def add_all_tags(server, ch_dev_path, DATA) -> Union[bool, list]:
    '''Add "tag" and "tag group" objects to a device in Kepware in one call.
    Accepts a dict with "tags" and/or "tag_groups" keys containing the objects
    (and tag group children) to create.

    INPUTS:
    "server" - instance of the "server" class
    "ch_dev_path" - path to add tags and tag groups under. Standard Kepware
    address decimal notation string such as "channel1.device1"
    "DATA" - properly JSON object (dict) of the tags, tag groups and their
    children expected by the Kepware Configuration API

    RETURNS:
    True - If a "HTTP 201 - Created" is received from Kepware for all items
    List - [tag failure list, tag group failure list] - if either call returned
    a "HTTP 207 - Multi-Status", the per-kind lists of failed items
    False - If neither "tags" nor "tag_groups" are present in DATA

    EXCEPTIONS:
    KepHTTPError - If urllib provides an HTTPError
    KepURLError - If urllib provides an URLError
    '''
    ######################################################
    # Need to Handle HTTP 207 from the tag/tag group calls
    ######################################################
    # Nothing to do when neither key is present.
    if 'tags' not in DATA and 'tag_groups' not in DATA:
        return False
    tags_result = add_tag(server, ch_dev_path, DATA['tags']) if 'tags' in DATA else False
    tag_groups_result = (add_tag_group(server, ch_dev_path, DATA['tag_groups'])
                         if 'tag_groups' in DATA else False)
    # Combine the two outcomes into a single return value.
    if tags_result is True and tag_groups_result is True:
        return True
    if tags_result is True:
        return [[], tag_groups_result]
    if tag_groups_result is True:
        return [tags_result, []]
    # Mixed results from both tags and tag groups.
    return [tags_result, tag_groups_result]
def modify_tag(server, full_tag_path, DATA, force = False) -> bool:
    '''Modify a "tag" object and its properties in Kepware.

    INPUTS:
    "server" - instance of the "server" class
    "full_tag_path" - path identifying location and tag to modify. Standard Kepware
    address decimal notation string including the tag such as
    "channel1.device1.tag_group1.tag1"
    "DATA" - properly JSON object (dict) of the tag properties to be modified
    "force" (optional) - if True, will force the configuration update to the Kepware server

    RETURNS:
    True - If a "HTTP 200 - OK" is received from Kepware

    EXCEPTIONS:
    KepHTTPError - If urllib provides an HTTPError
    KepURLError - If urllib provides an URLError
    '''
    tag_data = server._force_update_check(force, DATA)
    path_obj = kepconfig.path_split(full_tag_path)
    try:
        url = server.url + channel._create_url(path_obj['channel']) + device._create_url(path_obj['device'])
        # Every path component except the last names a tag group; the last is the tag.
        for tg in path_obj['tag_path'][:-1]:
            url += _create_tag_groups_url(tag_group=tg)
        url += _create_tags_url(tag=path_obj['tag_path'][-1])
    except KeyError as err:
        err_msg = 'Error: No key {} identified | Function: {}'.format(err, inspect.currentframe().f_code.co_name)
        raise KepError(err_msg)
    except Exception as e:
        err_msg = 'Error: Error with {}: {}'.format(inspect.currentframe().f_code.co_name, str(e))
        raise KepError(err_msg)
    r = server._config_update(url, tag_data)
    if r.code == 200:
        return True
    raise KepHTTPError(r.url, r.code, r.msg, r.hdrs, r.payload)
def modify_tag_group(server, tag_group_path, DATA, force = False) -> bool:
    '''Modify a "tag group" object and its properties in Kepware.

    INPUTS:
    "server" - instance of the "server" class
    "tag_group_path" - path identifying location and tag group to modify. Standard
    Kepware address decimal notation string such as "channel1.device1.tag_group1"
    "DATA" - properly JSON object (dict) of the tag group properties to be modified
    "force" (optional) - if True, will force the configuration update to the Kepware server

    RETURNS:
    True - If a "HTTP 200 - OK" is received from Kepware

    EXCEPTIONS:
    KepHTTPError - If urllib provides an HTTPError
    KepURLError - If urllib provides an URLError
    '''
    tag_group_data = server._force_update_check(force, DATA)
    path_obj = kepconfig.path_split(tag_group_path)
    try:
        # Channel/device base followed by every tag group on the path.
        segments = [server.url,
                    channel._create_url(path_obj['channel']),
                    device._create_url(path_obj['device'])]
        segments.extend(_create_tag_groups_url(tag_group=tg) for tg in path_obj['tag_path'])
        url = ''.join(segments)
    except KeyError as err:
        err_msg = 'Error: No key {} identified | Function: {}'.format(err, inspect.currentframe().f_code.co_name)
        raise KepError(err_msg)
    except Exception as e:
        err_msg = 'Error: Error with {}: {}'.format(inspect.currentframe().f_code.co_name, str(e))
        raise KepError(err_msg)
    r = server._config_update(url, tag_group_data)
    if r.code == 200:
        return True
    raise KepHTTPError(r.url, r.code, r.msg, r.hdrs, r.payload)
def del_tag(server, full_tag_path) -> bool:
    '''Delete a "tag" object at a specific path in Kepware.

    INPUTS:
    "server" - instance of the "server" class
    "full_tag_path" - path identifying location and tag to delete. Standard Kepware
    address decimal notation string including the tag such as
    "channel1.device1.tag_group1.tag1"

    RETURNS:
    True - If a "HTTP 200 - OK" is received from Kepware

    EXCEPTIONS:
    KepHTTPError - If urllib provides an HTTPError
    KepURLError - If urllib provides an URLError
    '''
    path_obj = kepconfig.path_split(full_tag_path)
    try:
        url = server.url + channel._create_url(path_obj['channel']) + device._create_url(path_obj['device'])
        # Every path component except the last names a tag group; the last is the tag.
        for tg in path_obj['tag_path'][:-1]:
            url += _create_tag_groups_url(tag_group=tg)
        url += _create_tags_url(tag=path_obj['tag_path'][-1])
    except KeyError as err:
        err_msg = 'Error: No key {} identified | Function: {}'.format(err, inspect.currentframe().f_code.co_name)
        raise KepError(err_msg)
    except Exception as e:
        err_msg = 'Error: Error with {}: {}'.format(inspect.currentframe().f_code.co_name, str(e))
        raise KepError(err_msg)
    r = server._config_del(url)
    if r.code == 200:
        return True
    raise KepHTTPError(r.url, r.code, r.msg, r.hdrs, r.payload)
def del_tag_group(server, tag_group_path) -> bool:
    '''Delete a "tag group" object at a specific path in Kepware.

    INPUTS:
    "server" - instance of the "server" class
    "tag_group_path" - path identifying location and tag group to delete. Standard
    Kepware address decimal notation string such as "channel1.device1.tag_group1"

    RETURNS:
    True - If a "HTTP 200 - OK" is received from Kepware

    EXCEPTIONS:
    KepHTTPError - If urllib provides an HTTPError
    KepURLError - If urllib provides an URLError
    '''
    path_obj = kepconfig.path_split(tag_group_path)
    try:
        # Channel/device base followed by every tag group on the path.
        segments = [server.url,
                    channel._create_url(path_obj['channel']),
                    device._create_url(path_obj['device'])]
        segments.extend(_create_tag_groups_url(tag_group=tg) for tg in path_obj['tag_path'])
        url = ''.join(segments)
    except KeyError as err:
        err_msg = 'Error: No key {} identified | Function: {}'.format(err, inspect.currentframe().f_code.co_name)
        raise KepError(err_msg)
    except Exception as err:
        err_msg = 'Error: Error with {}: {}'.format(inspect.currentframe().f_code.co_name, str(err))
        raise KepError(err_msg)
    r = server._config_del(url)
    if r.code == 200:
        return True
    raise KepHTTPError(r.url, r.code, r.msg, r.hdrs, r.payload)
def get_tag(server, full_tag_path) -> dict:
    '''Return the properties of the "tag" object at a specific path in Kepware.
    Returned object is JSON.

    INPUTS:
    "server" - instance of the "server" class
    "full_tag_path" - path identifying tag. Standard Kepware address decimal
    notation string including the tag such as "channel1.device1.tag_group1.tag1"

    RETURNS:
    dict - data for the tag requested

    EXCEPTIONS:
    KepHTTPError - If urllib provides an HTTPError
    KepURLError - If urllib provides an URLError
    '''
    path_obj = kepconfig.path_split(full_tag_path)
    try:
        url = server.url + channel._create_url(path_obj['channel']) + device._create_url(path_obj['device'])
        # Every path component except the last names a tag group; the last is the tag.
        for tg in path_obj['tag_path'][:-1]:
            url += _create_tag_groups_url(tag_group=tg)
        url += _create_tags_url(tag=path_obj['tag_path'][-1])
    except KeyError as err:
        err_msg = 'Error: No key {} identified | Function: {}'.format(err, inspect.currentframe().f_code.co_name)
        raise KepError(err_msg)
    except Exception as e:
        err_msg = 'Error: Error with {}: {}'.format(inspect.currentframe().f_code.co_name, str(e))
        raise KepError(err_msg)
    r = server._config_get(url)
    return r.payload
def get_all_tags(server, full_tag_path) -> list:
    '''Return the properties of all "tag" objects at a specific path in Kepware.
    Returned object is a JSON list.

    INPUTS:
    "server" - instance of the "server" class
    "full_tag_path" - path identifying location to retrieve the tag list. Standard
    Kepware address decimal notation string such as "channel1.device1.tag_group1"

    RETURNS:
    list - data for the tags requested

    EXCEPTIONS:
    KepHTTPError - If urllib provides an HTTPError
    KepURLError - If urllib provides an URLError
    '''
    path_obj = kepconfig.path_split(full_tag_path)
    try:
        # Channel/device base, any intermediate tag groups, then the tags leaf.
        segments = [server.url,
                    channel._create_url(path_obj['channel']),
                    device._create_url(path_obj['device'])]
        segments.extend(_create_tag_groups_url(tag_group=tg) for tg in path_obj.get('tag_path', []))
        segments.append(_create_tags_url())
        url = ''.join(segments)
    except KeyError as err:
        err_msg = 'Error: No key {} identified | Function: {}'.format(err, inspect.currentframe().f_code.co_name)
        raise KepError(err_msg)
    except Exception as e:
        err_msg = 'Error: Error with {}: {}'.format(inspect.currentframe().f_code.co_name, str(e))
        raise KepError(err_msg)
    r = server._config_get(url)
    return r.payload
def get_tag_group(server, tag_group_path) -> dict:
    '''Return the properties of the "tag group" object at a specific path in
    Kepware. Returned object is JSON.

    INPUTS:
    "server" - instance of the "server" class
    "tag_group_path" - path identifying tag group. Standard Kepware address
    decimal notation string such as "channel1.device1.tag_group1"

    RETURNS:
    dict - data for the tag group requested

    EXCEPTIONS:
    KepHTTPError - If urllib provides an HTTPError
    KepURLError - If urllib provides an URLError
    '''
    path_obj = kepconfig.path_split(tag_group_path)
    try:
        # Channel/device base followed by every tag group on the path.
        segments = [server.url,
                    channel._create_url(path_obj['channel']),
                    device._create_url(path_obj['device'])]
        segments.extend(_create_tag_groups_url(tag_group=tg) for tg in path_obj['tag_path'])
        url = ''.join(segments)
    except KeyError as err:
        err_msg = 'Error: No key {} identified | Function: {}'.format(err, inspect.currentframe().f_code.co_name)
        raise KepError(err_msg)
    except Exception as e:
        err_msg = 'Error: Error with {}: {}'.format(inspect.currentframe().f_code.co_name, str(e))
        raise KepError(err_msg)
    r = server._config_get(url)
    return r.payload
def get_all_tag_groups(server, tag_group_path) -> list:
    '''Return the properties of all "tag group" objects at a specific path in
    Kepware. Returned object is a JSON list.

    INPUTS:
    "server" - instance of the "server" class
    "tag_group_path" - path identifying location to retrieve the tag group list.
    Standard Kepware address decimal notation string such as
    "channel1.device1.tag_group1"

    RETURNS:
    list - data for the tag groups requested

    EXCEPTIONS:
    KepHTTPError - If urllib provides an HTTPError
    KepURLError - If urllib provides an URLError
    '''
    path_obj = kepconfig.path_split(tag_group_path)
    try:
        # Channel/device base, any intermediate tag groups, then the tag_groups leaf.
        segments = [server.url,
                    channel._create_url(path_obj['channel']),
                    device._create_url(path_obj['device'])]
        segments.extend(_create_tag_groups_url(tag_group=tg) for tg in path_obj.get('tag_path', []))
        segments.append(_create_tag_groups_url())
        url = ''.join(segments)
    except KeyError as err:
        err_msg = 'Error: No key {} identified | Function: {}'.format(err, inspect.currentframe().f_code.co_name)
        raise KepError(err_msg)
    except Exception as e:
        err_msg = 'Error: Error with {}: {}'.format(inspect.currentframe().f_code.co_name, str(e))
        raise KepError(err_msg)
    r = server._config_get(url)
    return r.payload
def get_full_tag_structure(server, path, recursive = False) -> dict:
    '''Return the properties of all "tag" and "tag group" objects at a specific
    path in Kepware. Returned object is a dict of a tag list and a tag group list.
    Ex.
    {
        'tags': [tag1_dict, tag2_dict,...],
        'tag_groups': [tag_group1_dict, tag_group2_dict,...]
    }

    If recursive is True, the call iterates through all tag groups and gets the
    tags and tag groups of every tag group child, at every depth. This is the
    equivalent of asking for all tags and tag groups that exist below the "path"
    location. The returned object is nested based on how many levels of the
    tag_group namespace have tags or tag_groups:
    Ex.
    {
        'tags': [tag1_dict, tag2_dict,...],
        'tag_groups': [
            {
                tag_group1_properties,
                'tags': [tag1_dict, tag2_dict,...],
                'tag_groups': [sub_group1, sub_group2,...]
            },
            ...]
    }

    INPUTS:
    "server" - instance of the "server" class
    "path" - path identifying location to retrieve the tag structure. Standard
    Kepware address decimal notation string such as "channel1.device1.tag_group1";
    must contain at least the channel and device.

    RETURNS:
    dict - data for the tag structure requested at the "path" location

    EXCEPTIONS:
    KepHTTPError - If urllib provides an HTTPError
    KepURLError - If urllib provides an URLError
    '''
    r = {}
    r['tags'] = get_all_tags(server, path)
    r['tag_groups'] = get_all_tag_groups(server, path)
    if recursive:
        for group in r['tag_groups']:
            # BUGFIX: propagate recursive=True to the nested call. Previously the
            # flag was dropped, so the descent stopped one level below "path"
            # instead of covering the whole subtree as documented.
            res = get_full_tag_structure(
                server, path + '.' + group['common.ALLTYPES_NAME'], recursive=True)
            group.update(res)
    return r
| 39.296512 | 124 | 0.660995 | 2,858 | 20,277 | 4.517495 | 0.079426 | 0.034699 | 0.029742 | 0.03346 | 0.849043 | 0.827124 | 0.798854 | 0.777477 | 0.757803 | 0.753543 | 0 | 0.00808 | 0.230951 | 20,277 | 515 | 125 | 39.372816 | 0.819867 | 0.438033 | 0 | 0.730233 | 0 | 0 | 0.105218 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.065116 | false | 0 | 0.023256 | 0 | 0.162791 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
938766ab18b5b0aafff63e7d539af8b33a165431 | 3,617 | py | Python | test/test_multiFunc.py | pbiernat/ripr | 223954f4b2e1283239c80df9ea434050da710de7 | [
"MIT"
] | 338 | 2017-01-16T05:05:22.000Z | 2022-02-21T00:39:30.000Z | test/test_multiFunc.py | pbiernat/ripr | 223954f4b2e1283239c80df9ea434050da710de7 | [
"MIT"
] | 16 | 2017-01-16T05:00:19.000Z | 2020-01-17T16:11:37.000Z | test/test_multiFunc.py | pbiernat/ripr | 223954f4b2e1283239c80df9ea434050da710de7 | [
"MIT"
] | 34 | 2017-01-20T05:29:34.000Z | 2021-08-16T04:08:25.000Z | import unittest
import subprocess
import sys,os
from ripr import test_harness
from ripr import gui
from ripr import analysis_engine
import binaryninja
class x64_multiTest(unittest.TestCase):
    """End-to-end test: package func_1 from the x64 multi-function sample
    binary with the 'hook' import-call strategy and 'section' data strategy,
    then run the emitted harness and check its output."""

    def test(self):
        print("Starting Test")
        binary = "x64_multiFunc"
        dname = os.path.dirname(os.path.abspath(__file__))
        bv = binaryninja.BinaryViewType["ELF"].open(dname + "/../sample/multiFunc/%s" % binary)
        bv.update_analysis_and_wait()
        target = 0
        for f in bv.functions:
            if f.name == "func_1":
                target = f.start
        print("Finished Loading Binary")
        engine = analysis_engine.bn_engine(bv)
        ui_ = gui.riprWidget()
        p = test_harness.t_Packager(True, target, engine, ui=ui_)
        p.impCallStrategy = 'hook'
        p.dataStrategy = 'section'
        p.resolve_arguments = True
        p.package_function("x64_test")
        # exist_ok avoids the check-then-create race of os.path.exists().
        os.makedirs('/tmp/riprtest/', exist_ok=True)
        # Context manager guarantees the script is flushed and closed
        # before the subprocess tries to execute it.
        with open('/tmp/riprtest/%s.py' % binary, 'w+') as t:
            t.write(p.codeobj.final)
        # universal_newlines=True makes check_output return str, so the
        # str split below works under Python 3 (bytes.split("\n") raises).
        testProc = subprocess.check_output(
            ['python', '/tmp/riprtest/%s.py' % binary], universal_newlines=True)
        testProc = testProc.split("\n")
        self.assertIn('15', testProc[-2])
class x86_multiTest(unittest.TestCase):
    """End-to-end test: package func_1 from the x86 multi-function sample
    binary with the 'hook' import-call strategy and 'section' data strategy,
    then run the emitted harness and check its output."""

    def test(self):
        print("Starting Test")
        binary = "x86_multiFunc"
        dname = os.path.dirname(os.path.abspath(__file__))
        bv = binaryninja.BinaryViewType["ELF"].open(dname + "/../sample/multiFunc/%s" % binary)
        bv.update_analysis_and_wait()
        target = 0
        for f in bv.functions:
            if f.name == "func_1":
                target = f.start
        print("Finished Loading Binary")
        engine = analysis_engine.bn_engine(bv)
        ui_ = gui.riprWidget()
        p = test_harness.t_Packager(True, target, engine, ui=ui_)
        p.impCallStrategy = 'hook'
        p.dataStrategy = 'section'
        p.resolve_arguments = True
        p.package_function("x64_test")
        # exist_ok avoids the check-then-create race of os.path.exists().
        os.makedirs('/tmp/riprtest/', exist_ok=True)
        # Context manager guarantees the script is flushed and closed
        # before the subprocess tries to execute it.
        with open('/tmp/riprtest/%s.py' % binary, 'w+') as t:
            t.write(p.codeobj.final)
        # universal_newlines=True makes check_output return str, so the
        # str split below works under Python 3 (bytes.split("\n") raises).
        testProc = subprocess.check_output(
            ['python', '/tmp/riprtest/%s.py' % binary], universal_newlines=True)
        testProc = testProc.split("\n")
        self.assertIn('15', testProc[-2])
class arm_multiTest(unittest.TestCase):
    """End-to-end test: package func_1 from the ARM multi-function sample
    binary with the 'hook' import-call strategy and 'page' data strategy,
    then run the emitted harness and check its output."""

    def test(self):
        print("Starting Test")
        binary = "arm_multiFunc"
        dname = os.path.dirname(os.path.abspath(__file__))
        bv = binaryninja.BinaryViewType["ELF"].open(dname + "/../sample/multiFunc/%s" % binary)
        bv.update_analysis_and_wait()
        target = 0
        for f in bv.functions:
            if f.name == "func_1":
                target = f.start
        print("Finished Loading Binary")
        engine = analysis_engine.bn_engine(bv)
        ui_ = gui.riprWidget()
        p = test_harness.t_Packager(True, target, engine, ui=ui_)
        p.impCallStrategy = 'hook'
        p.dataStrategy = 'page'
        p.resolve_arguments = True
        p.package_function("x64_test")
        # exist_ok avoids the check-then-create race of os.path.exists().
        os.makedirs('/tmp/riprtest/', exist_ok=True)
        # Context manager guarantees the script is flushed and closed
        # before the subprocess tries to execute it.
        with open('/tmp/riprtest/%s.py' % binary, 'w+') as t:
            t.write(p.codeobj.final)
        # universal_newlines=True makes check_output return str, so the
        # str split below works under Python 3 (bytes.split("\n") raises).
        testProc = subprocess.check_output(
            ['python', '/tmp/riprtest/%s.py' % binary], universal_newlines=True)
        testProc = testProc.split("\n")
        self.assertIn('15', testProc[-2])
| 30.141667 | 93 | 0.584186 | 428 | 3,617 | 4.792056 | 0.205607 | 0.064359 | 0.035105 | 0.040956 | 0.924427 | 0.924427 | 0.924427 | 0.924427 | 0.924427 | 0.924427 | 0 | 0.011115 | 0.278684 | 3,617 | 119 | 94 | 30.394958 | 0.77501 | 0 | 0 | 0.840909 | 0 | 0 | 0.146058 | 0.019087 | 0 | 0 | 0 | 0 | 0.034091 | 1 | 0.034091 | false | 0 | 0.079545 | 0 | 0.147727 | 0.068182 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f50eb09e135650296f597a210af72f5b7a8e6ba3 | 8,608 | py | Python | vae-dpc/cvae_backbone.py | lovish1234/TPC | 10e93eeb0e22e411579cfb9f94fac7870f6e2039 | [
"MIT"
] | null | null | null | vae-dpc/cvae_backbone.py | lovish1234/TPC | 10e93eeb0e22e411579cfb9f94fac7870f6e2039 | [
"MIT"
] | null | null | null | vae-dpc/cvae_backbone.py | lovish1234/TPC | 10e93eeb0e22e411579cfb9f94fac7870f6e2039 | [
"MIT"
] | null | null | null | # BVH, May 2020
# Components of CVAE
import sys
import math
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
sys.path.append('../backbone')
class My_CVAE_FC(nn.Module):
    '''
    Fully Connected Conditional Variational Autoencoder that maps an aggregated present
    representation (c_t) to a plausible future representation (z_t+1 a.k.a. w_t+1).
    Code inspired by https://github.com/jojonki/AutoEncoders/blob/master/cvae.ipynb
    '''

    def __init__(self, input_size, output_size, latent_size, hidden_size):
        super(My_CVAE_FC, self).__init__()
        self.input_size = input_size
        self.output_size = output_size
        self.latent_size = latent_size
        self.hidden_size = hidden_size

        # Encoder
        self.enc_fc1 = nn.Sequential(
            nn.Linear(output_size + input_size, hidden_size),
            nn.ReLU(inplace=False)
        )
        self.enc_fc2_mu = nn.Linear(hidden_size, latent_size)
        self.enc_fc2_logvar = nn.Linear(hidden_size, latent_size)

        # Decoder
        self.dec_fc1 = nn.Sequential(
            nn.Linear(latent_size + input_size, hidden_size),
            nn.ReLU(inplace=False)
        )
        self.dec_fc2 = nn.Linear(hidden_size, output_size)

    def encode(self, y, x):
        '''
        Implements Q(z|Y, X), concretely gaussian_params(z) = (mu(Y, X), logvar(Y, X)).
        y: (batch_size, output_size), x: (batch_size, input_size).
        '''
        y = y.view(-1, self.output_size)
        x = x.view(-1, self.input_size)
        # (batch_size, output_size + input_size)
        inputs = torch.cat([y, x], dim=1)
        h = self.enc_fc1(inputs)
        z_mu = self.enc_fc2_mu(h)
        z_logvar = self.enc_fc2_logvar(h)
        return (z_mu, z_logvar)

    def decode(self, z, x):
        '''
        Implements P(Y|z, X), concretely Y = f(z, X).
        z: (batch_size, latent_size), x: (batch_size, input_size).
        '''
        z = z.view(-1, self.latent_size)
        x = x.view(-1, self.input_size)
        # (batch_size, latent_size + input_size)
        inputs = torch.cat([z, x], dim=1)
        h = self.dec_fc1(inputs)
        y = self.dec_fc2(h)
        return y

    def forward_train(self, y, x):
        '''Training pass: encode (y, x), sample z via the reparameterization
        trick, decode. Returns (reconstruction, mu, logvar).'''
        y = y.view(-1, self.output_size)
        x = x.view(-1, self.input_size)
        mu, logvar = self.encode(y, x)
        # Reparameterization: z = mu + sigma * eps with eps ~ N(0, I).
        # randn_like inherits device/dtype from logvar, so this works on
        # both CPU and GPU (the previous explicit .cuda() broke CPU runs).
        sigma = (0.5 * logvar).exp()
        z = mu + sigma * torch.randn_like(logvar)
        return self.decode(z, x), mu, logvar

    def forward_test(self, x):
        '''Inference pass: sample z ~ N(0, I) on the same device as x and decode.'''
        x = x.view(-1, self.input_size)
        z = torch.randn(x.shape[0], self.latent_size, device=x.device)
        return self.decode(z, x)
class My_CVAE_Conv1x1(nn.Module):
    '''
    1x1 Convolutional Conditional Variational Autoencoder that maps an aggregated present
    representation (c_t) to a plausible future representation (z_t+1 a.k.a. w_t+1).
    NOTE: all spatial locations act independently of each other, even the latent space
    and its distribution parameters.
    '''

    def __init__(self, input_size, output_size, latent_size, hidden_size):
        super(My_CVAE_Conv1x1, self).__init__()
        self.input_size = input_size
        self.output_size = output_size
        self.latent_size = latent_size
        self.hidden_size = hidden_size

        # Encoder
        self.enc_fc1 = nn.Sequential(
            nn.Conv2d(output_size + input_size, hidden_size,
                      kernel_size=1, padding=0),
            nn.ReLU()
        )
        self.enc_fc2_mu = nn.Conv2d(
            hidden_size, latent_size, kernel_size=1, padding=0)
        self.enc_fc2_logvar = nn.Conv2d(
            hidden_size, latent_size, kernel_size=1, padding=0)

        # Decoder
        self.dec_fc1 = nn.Sequential(
            nn.Conv2d(latent_size + input_size, hidden_size,
                      kernel_size=1, padding=0),
            nn.ReLU()
        )
        self.dec_fc2 = nn.Conv2d(
            hidden_size, output_size, kernel_size=1, padding=0)

    def encode(self, y, x):
        '''
        Implements Q(z|Y, X), concretely gaussian_params(z) = (mu(Y, X), logvar(Y, X)).
        y: (batch_size, output_size, height, width), x: (batch_size, input_size, height, width).
        '''
        inputs = torch.cat(
            [y, x], dim=1)  # (batch_size, output_size + input_size, height, width)
        h = self.enc_fc1(inputs)
        z_mu = self.enc_fc2_mu(h)
        z_logvar = self.enc_fc2_logvar(h)
        return (z_mu, z_logvar)

    def decode(self, z, x):
        '''
        Implements P(Y|z, X), concretely Y = f(z, X).
        z: (batch_size, latent_size, height, width), x: (batch_size, input_size, height, width).
        '''
        inputs = torch.cat(
            [z, x], dim=1)  # (batch_size, latent_size + input_size, height, width)
        h = self.dec_fc1(inputs)
        y = self.dec_fc2(h)
        return y

    def forward_train(self, y, x):
        '''Training pass: encode (y, x), sample z via the reparameterization
        trick, decode. Returns (reconstruction, mu, logvar).'''
        mu, logvar = self.encode(y, x)
        # Reparameterization: z = mu + sigma * eps with eps ~ N(0, I).
        # randn_like inherits device/dtype from logvar, so this works on
        # both CPU and GPU (the previous explicit .cuda() broke CPU runs).
        sigma = (0.5 * logvar).exp()
        z = mu + sigma * torch.randn_like(logvar)
        return self.decode(z, x), mu, logvar

    def forward_test(self, x):
        '''Inference pass: sample a per-location z ~ N(0, I) on the same
        device as x and decode.'''
        z = torch.randn(x.shape[0], self.latent_size,
                        x.shape[2], x.shape[3], device=x.device)
        return self.decode(z, x)
class My_CVAE_ConvFC(nn.Module):
    '''
    Convolutional Conditional Variational Autoencoder that maps an aggregated present
    representation (c_t) to a plausible future representation (z_t+1 a.k.a. w_t+1).
    The latent space exists in-between two pseudo fully connected layers such that
    encoding and decoding can share information across all spatial blocks.
    '''

    def __init__(self, input_size, output_size, latent_size, hidden_size, spatial_size):
        super(My_CVAE_ConvFC, self).__init__()
        self.input_size = input_size
        self.output_size = output_size
        self.latent_size = latent_size
        self.hidden_size = hidden_size
        self.spatial_size = spatial_size

        # Encoder
        self.enc_fc1 = nn.Sequential(
            nn.Conv2d(output_size + input_size, hidden_size,
                      kernel_size=1, padding=0),
            nn.ReLU()
        )
        self.enc_fc2_mu = nn.Linear(
            hidden_size * spatial_size ** 2, latent_size)
        self.enc_fc2_logvar = nn.Linear(
            hidden_size * spatial_size ** 2, latent_size)

        # Decoder
        self.dec_fc1 = nn.Sequential(
            nn.Conv2d(latent_size + input_size, hidden_size,
                      kernel_size=1, padding=0),
            nn.ReLU()
        )
        self.dec_fc2 = nn.Conv2d(
            hidden_size, output_size, kernel_size=1, padding=0)

    def encode(self, y, x):
        '''
        Implements Q(z|Y, X), concretely gaussian_params(z) = (mu(Y, X), logvar(Y, X)).
        y: (batch_size, output_size, height, width), x: (batch_size, input_size, height, width).
        '''
        inputs = torch.cat(
            [y, x], dim=1)  # (batch_size, output_size + input_size, height, width)
        h = self.enc_fc1(inputs)
        # flatten before linear
        h = h.view(-1, self.hidden_size * self.spatial_size ** 2)
        z_mu = self.enc_fc2_mu(h)
        z_logvar = self.enc_fc2_logvar(h)
        return (z_mu, z_logvar)

    def decode(self, z, x):
        '''
        Implements P(Y|z, X), concretely Y = f(z, X).
        z: (batch_size, latent_size), x: (batch_size, input_size, height, width).
        '''
        z = z.repeat(self.spatial_size, self.spatial_size, 1, 1).permute(
            2, 3, 0, 1)  # broadcast across space
        # (batch_size, latent_size + input_size, height, width)
        inputs = torch.cat([z, x], dim=1)
        h = self.dec_fc1(inputs)
        y = self.dec_fc2(h)
        return y

    def forward_train(self, y, x):
        '''Training pass: encode (y, x), sample z via the reparameterization
        trick, decode. Returns (reconstruction, mu, logvar).'''
        mu, logvar = self.encode(y, x)
        # Reparameterization: z = mu + sigma * eps with eps ~ N(0, I).
        # randn_like inherits device/dtype from logvar, so this works on
        # both CPU and GPU (the previous explicit .cuda() broke CPU runs).
        sigma = (0.5 * logvar).exp()
        z = mu + sigma * torch.randn_like(logvar)
        return self.decode(z, x), mu, logvar

    def forward_test(self, x):
        '''Inference pass: sample z ~ N(0, I) on the same device as x and decode.'''
        z = torch.randn(x.shape[0], self.latent_size, device=x.device)
        return self.decode(z, x)
return self.decode(z, x)
| 36.016736 | 119 | 0.596422 | 1,197 | 8,608 | 4.071011 | 0.128655 | 0.057254 | 0.056023 | 0.024625 | 0.856146 | 0.842192 | 0.828032 | 0.817566 | 0.801149 | 0.782475 | 0 | 0.016924 | 0.286129 | 8,608 | 238 | 120 | 36.168067 | 0.776078 | 0.269168 | 0 | 0.75 | 0 | 0 | 0.00183 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.098684 | false | 0 | 0.039474 | 0 | 0.236842 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1932af7d4e14b9a14d363c5e282ccbcce9c5c0a5 | 363 | py | Python | hatch_vcs/hooks.py | ofek/hatch-vcs | c5388d67192d9bf88191927a35f51705121784a1 | [
"MIT"
] | null | null | null | hatch_vcs/hooks.py | ofek/hatch-vcs | c5388d67192d9bf88191927a35f51705121784a1 | [
"MIT"
] | 1 | 2022-03-08T04:07:09.000Z | 2022-03-18T05:41:17.000Z | hatch_vcs/hooks.py | ofek/hatch-vcs | c5388d67192d9bf88191927a35f51705121784a1 | [
"MIT"
] | null | null | null | # SPDX-FileCopyrightText: 2022-present Ofek Lev <oss@ofek.dev>
#
# SPDX-License-Identifier: MIT
from hatchling.plugin import hookimpl
from .build_hook import VCSBuildHook
from .version_source import VCSVersionSource
@hookimpl
def hatch_register_version_source():
    """Hatchling plugin hook: return the VCS version-source class to register."""
    return VCSVersionSource
@hookimpl
def hatch_register_build_hook():
    """Hatchling plugin hook: return the VCS build-hook class to register."""
    return VCSBuildHook
| 20.166667 | 62 | 0.809917 | 44 | 363 | 6.5 | 0.590909 | 0.062937 | 0.188811 | 0.223776 | 0.27972 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012579 | 0.123967 | 363 | 17 | 63 | 21.352941 | 0.886792 | 0.245179 | 0 | 0.222222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | true | 0 | 0.333333 | 0.222222 | 0.777778 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
1957df9d727af4a17057f40b347084ebcedd5e4d | 4,245 | py | Python | no17/no17.py | shopetan/ksnctf | f4b7724c956a5dc0590c83ff6f5fb6a249b81371 | [
"MIT"
] | null | null | null | no17/no17.py | shopetan/ksnctf | f4b7724c956a5dc0590c83ff6f5fb6a249b81371 | [
"MIT"
] | null | null | null | no17/no17.py | shopetan/ksnctf | f4b7724c956a5dc0590c83ff6f5fb6a249b81371 | [
"MIT"
] | null | null | null | #coding:utf-8
import math
solve = 274804002340875032441111945052338695066094639885538684207460638041831698138955791698008614030188794770670069893083077967804847453153803913408967500061296200418900142271531614777955446068446204189307344556282931652007165895647159270759724719458999987023557759985864121720952524398668099944856546881643463344130813178818329115380925361069508175229673203329864722281434091346673846589279120639393608946606868480928665119788421018752526935591376318255983360064942316712662252720319794061896534167471099387193016865598401961156702468197444641386411165189311347579504275345204222193866744578970674150816094959832295040376035530574075749512285081995821974547800947632153199768886456788132857157024027864915005786361480030403445284238027416149181792694921376274094182902765731101623622484015768953283827445869903898943052715247454036708674657968898707604225280491045987363644477821843453024764776063777088165859601674561067270763858366520185803597748574877548144841739436380116366463252769510659993065713240566676673053099716896974360377175116659113730946284507732023388957087171568223157628348583707983892592784529156566421334916425323816652589549420352053886110202712305770641304850379959827003716233738688290194003750030104063611869672341795277708333414654599112714802366146145514265336797662930843491923763932980850456159050586498389055205198723409657784928853629363138095088178784031997696819870469770196614656184381956376528029382312002894169156089472203250393254056046179419040801635978602967968695771103584578576237776820367691906093515538210487792673629261113024305790950133252810370046396169793223044497857157154819091115574111332457367944463870319258321195231617312274515352954233917063174936301974263033945650277215086770349732601083221705430708782677687048185228481674757498335407717076128617575424322351948257237171762545340559759679058349914503635030295532752146164826253785564587638785820157610738545084460923832760505691624356445812059554001387207526731630499975293482912258342916866
5162743589578036716137649553856654996867605565582594039606555708509284616434305172100068285925706963351193710675088846623856567419346569873886366829228933416064828304824833588800700991940600359503453201939139663042787644390810036292415117714919711827630953170559057272633043896443339064006637234499569232762828723613158050896065355005775876910820958296537497557737916521798848004761708690607167573807307291510879396794861418856342383200817566360552405183866698509354047737422523253071467100174078467454351746681775690022510266842064132386305358891086764558955802257688899610117102582837343655907837234028334304769930810792079059216436489942124896722072971246781926084943216581585837400274934104255861076781834022322597318553478829221018993823759479304536464719195824731739557957722610850860725276329731096193041588880149698625007746958307472328762247329346952956782896672291984502790479223886842985800649168009891087704339671376795754679245964575179873102014722210341771266309855717402003098724600141420936602986387680283404929020457247001371544838792904086327642729822000980710278752669990211765608002907900832262843253793831541691706704836397397798869236939393204666502455311086553874765248631328418556164635889080357612074921368044611251307530838475840480894307375072202500636365832958938363048173011687247738236161480446422712858040552310006617829659443118541556912488329721272939472554467384944920030182974546889304443711910957344160175437149714520561879951921970795705645045936350875827028675689840953101114431720413756855193291198455863087675930604549263160397353363504597829924339064422377323361781720524799661393081986371074530022532621955945720583925291264598924971169093688390536693144593482790588893095052569365154072722966434676949346037949263628957665599420417719951187489606010866702371368012263032537375401145460592536898818245350468847674995676417425737655723761467908866712060720593684978725896677308273
# Recover the flag by taking the exact integer 101st root of `solve`.
# (Original comment, Japanese "適当に", meant "rough / ad-hoc approach".)
def nth_root(value, k=101):
    """Return floor(value ** (1/k)) for a non-negative integer via binary search.

    Generalizes the original hard-coded exponent 101 and always terminates,
    even when `value` is not a perfect k-th power: the original loop set
    `low = ave` on a miss and so could spin forever one step from the answer.

    Raises ValueError for negative `value`.
    """
    if value < 0:
        raise ValueError("value must be non-negative")
    # Find an upper bound for the root by powers of 10, as the script did.
    high = 1
    while high ** k < value:
        high *= 10
    low = 0
    ans = 0
    while low <= high:
        # `//` keeps integer semantics on both Python 2 and Python 3
        # (the original relied on Python 2's integer `/`).
        mid = (low + high) // 2
        if mid ** k <= value:
            ans = mid        # best (largest) candidate whose power fits
            low = mid + 1
        else:
            high = mid - 1
    return ans


if __name__ == "__main__":
    ave = nth_root(solve)
    # Parenthesized form prints identically on Python 2 and 3
    # (the original `print "..."` statement is a SyntaxError on Python 3).
    print("FLAG_%d" % ave)
| 163.269231 | 3,921 | 0.961131 | 53 | 4,245 | 76.962264 | 0.433962 | 0.007845 | 0.00809 | 0.005393 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.951379 | 0.026148 | 4,245 | 25 | 3,922 | 169.8 | 0.035317 | 0.003534 | 0 | 0.117647 | 0 | 0 | 0.001656 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0.058824 | null | null | 0.058824 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5fe46abf0b3d770313b1fba0c2756eca626de8fc | 1,754 | py | Python | tests/convert_beginning_spaces_test.py | toddnguyen47/pre-commit-hooks | ce500b759db3627ac88598dd183bcac473b4f1bb | [
"MIT"
] | null | null | null | tests/convert_beginning_spaces_test.py | toddnguyen47/pre-commit-hooks | ce500b759db3627ac88598dd183bcac473b4f1bb | [
"MIT"
] | null | null | null | tests/convert_beginning_spaces_test.py | toddnguyen47/pre-commit-hooks | ce500b759db3627ac88598dd183bcac473b4f1bb | [
"MIT"
] | null | null | null | """Convert beginning spaces to tabs test"""
from pre_commit_hooks import convert_beginning_spaces
def test_given_only_tabs_when_converting_then_no_conversion_is_needed():
    # An already-tabbed line should pass through the converter unchanged (tab width 4).
    # NOTE(review): the leading whitespace inside these string literals appears
    # mangled (tabs/spaces collapsed in transit) — verify against the original repo.
    test_str = " Hello World"
    actual_str = convert_beginning_spaces.convert_spaces_to_tabs(test_str, 4)
    assert actual_str == " Hello World"
def test_given_extra_whitespace_when_converting_then_no_conversion(): 
    # Leading whitespace short of a full tab stop should not be converted.
    # NOTE(review): the leading whitespace inside these string literals appears
    # mangled (tabs/spaces collapsed in transit) — verify against the original repo.
    test_str = " Hello World"
    actual_str = convert_beginning_spaces.convert_spaces_to_tabs(test_str, 4)
    assert actual_str == " Hello World"
def test_given_spaces_when_converting_then_convert_correctly():
    # A full tab-width run of leading spaces should be converted to a tab.
    # NOTE(review): the leading whitespace inside these string literals appears
    # mangled (tabs/spaces collapsed in transit) — verify against the original repo.
    test_str = " Hello World"
    actual_str = convert_beginning_spaces.convert_spaces_to_tabs(test_str, 4)
    assert actual_str == " Hello World"
def test_given_nonsecutive_spaces_when_converting_then_convert_correctly():
    # Non-consecutive leading whitespace should still be normalized to tabs.
    # NOTE(review): the leading whitespace inside these string literals appears
    # mangled (tabs/spaces collapsed in transit) — verify against the original repo.
    test_str = " Hello World"
    actual_str = convert_beginning_spaces.convert_spaces_to_tabs(test_str, 4)
    assert actual_str == " Hello World"
def test_given_only_spaces_when_converting_then_convert_correctly():
    # Purely space-based indentation should be converted to tab indentation.
    # NOTE(review): the leading whitespace inside these string literals appears
    # mangled (tabs/spaces collapsed in transit) — verify against the original repo.
    test_str = " Hello World"
    actual_str = convert_beginning_spaces.convert_spaces_to_tabs(test_str, 4)
    assert actual_str == " Hello World"
def test_given_only_spaces_with_extra_spaces_when_converting_then_convert_correctly():
    # Space indentation with a leftover partial tab-width should convert the
    # full stops to tabs and keep the remainder.
    # NOTE(review): the leading whitespace inside these string literals appears
    # mangled (tabs/spaces collapsed in transit) — verify against the original repo.
    test_str = " Hello World"
    actual_str = convert_beginning_spaces.convert_spaces_to_tabs(test_str, 4)
    assert actual_str == " Hello World"
def test_given_only_spaces_with_tabs_found_after_non_whitespace_char_when_converting_then_convert_correctly():
    # Only *leading* whitespace is converted; whitespace after the first
    # non-whitespace character must be left alone.
    # NOTE(review): the leading whitespace inside these string literals appears
    # mangled (tabs/spaces collapsed in transit) — verify against the original repo.
    test_str = " Hello World"
    actual_str = convert_beginning_spaces.convert_spaces_to_tabs(test_str, 4)
    assert actual_str == " Hello World"
| 38.130435 | 110 | 0.775371 | 242 | 1,754 | 5.066116 | 0.14876 | 0.079935 | 0.14845 | 0.104405 | 0.862969 | 0.814029 | 0.814029 | 0.814029 | 0.814029 | 0.814029 | 0 | 0.004759 | 0.161346 | 1,754 | 45 | 111 | 38.977778 | 0.828688 | 0.021095 | 0 | 0.724138 | 0 | 0 | 0.130333 | 0 | 0 | 0 | 0 | 0 | 0.241379 | 1 | 0.241379 | false | 0 | 0.034483 | 0 | 0.275862 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
270e63031ed6b043785129b05f2dfdc64fa6002e | 23,457 | py | Python | u8g2/luRS08_te.py | tve/mpy-lib | 9f102459c61a5be424291a277e421bd1fc16843a | [
"MIT"
] | 6 | 2020-02-27T11:17:54.000Z | 2020-12-04T10:14:26.000Z | u8g2/luRS08_te.py | tve/mpy-lib | 9f102459c61a5be424291a277e421bd1fc16843a | [
"MIT"
] | 4 | 2020-07-29T14:07:04.000Z | 2021-05-19T05:10:33.000Z | u8g2/luRS08_te.py | tve/mpy-lib | 9f102459c61a5be424291a277e421bd1fc16843a | [
"MIT"
] | 3 | 2020-05-16T08:15:16.000Z | 2021-09-30T10:39:37.000Z | data=b'\xbe\x00\x03\x02\x04\x04\x04\x04\x05\x11\x11\xfa\xfb\x08\xfe\x08\xfe\x01\x65\x02\xe5\x08\xaf\x20\x05\x00\x98\x13\x21\x07\x81\x89\x13\x97\x00\x22\x07\x33\xd9\x14\x89\x25\x23\x12\x87\x88\x77\x49\xa6\x44\xc3\x94\x84\x49\x34\x4c\x89\x96\x64\x00\x24\x10\xa5\x79\x57\xd9\xa0\x44\x49\x26\x6a\x49\x94\x0c\x5a\x04\x25\x0f\x86\x89\x37\x99\xd2\x32\x25\x61\x12\x2d\x2d\x5a\x02\x26\x10\x85\x89\x38\x53\x12\x25\x91\x94\x24\xca\x10\x45\x4a\x00\x27\x06\x31\xd9\x13\x03\x28\x09\xa2\x69\x34\x89\xd2\x53\x12\x29\x0b\xa2\x69\x14\x49\x94\x74\x51\x12\x00\x2a\x0a\x44\xc9\x35\xd1\x90\x48\x0a\x00\x2b\x0b\x77\x88\x77\x71\x6d\x18\xb2\xb8\x06\x2c\x08\x42\x69\x13\x43\xa2\x00\x2d\x06\x12\xb9\x14\x02\x2e\x06\x11\x89\x13\x01\x2f\x0c\xa5\x68\x95\x59\x58\x0b\xb3\xb0\x16\x02\x30\x0e\x86\x88\x37\x43\x54\x09\x1d\x93\x28\x1a\x12\x00\x31\x07\x82\x8a\x17\x4a\x3f\x32\x0b\x85\x89\x37\x4b\x16\x66\x92\xdb\x20\x33\x0d\x85\x89\x17\x43\x1a\x26\x6b\x18\x0e\x0a\x00\x34\x0e\x86\x88\x77\xa2\x96\x44\x15\x29\x19\xc6\x34\x01\x35\x0c\x84\x89\x17\x83\x96\x8d\x59\x36\x24\x00\x36\x0d\x85\x89\x37\x43\x92\x85\x43\x92\xd9\x92\x05\x37\x0c\x85\x89\x17\x83\x98\x65\x5a\x4d\x0b\x01\x38\x0d\x85\x89\x37\x4b\xa6\x25\x8b\xb2\x69\xc9\x02\x39\x0e\x85\x89\x37\x4b\xb4\x69\xc9\x10\x66\xc9\x90\x00\x3a\x07\x61\x89\x13\x61\x00\x3b\x07\x81\x69\x13\xe1\x00\x3c\x0a\x76\x88\xb7\x99\x49\x56\xe5\x00\x3d\x07\x36\xa8\x17\xe7\x61\x3e\x0b\x76\x88\x17\xb1\x2a\x4b\x9a\x96\x02\x3f\x0b\x84\x88\x15\x63\x16\xd5\x72\x20\x02\x40\x12\x88\x8a\x79\x43\xa4\x29\xc9\xa2\x94\x94\x92\x62\xca\x91\x21\x02\x41\x0f\x87\x88\x78\xe9\x98\x84\x49\x56\x1a\xa4\x2c\x49\x03\x42\x0f\x85\x89\x16\x43\x92\x69\x83\x12\x25\x99\x36\x28\x00\x43\x0c\x86\x89\x58\x43\x92\xa9\xcd\x59\x34\x04\x44\x0d\x87\x89\x18\x83\x14\x26\xa9\xc7\x64\x90\x00\x45\x0b\x85\x89\x16\xc7\x70\x48\xc2\xe2\x20\x46\x0b\x84\x89\x16\x83\x56\x1b\xb4\x1a\x00\x47\x0d\x86\x89\x58\x43\x92\xa9\x45\x31\xc9\xa2\x21\x48\x0b\x86\x89\x18\xa1\x71\x18\x44\xc7\x00\x49\x0
7\x81\x89\x13\x07\x01\x4a\x08\xa3\x68\x53\xfd\x69\x01\x4b\x0e\x86\x89\x17\xd9\x54\xc9\x6e\x89\x14\x29\x99\x00\x4c\x09\x85\x89\x16\x61\x1f\x07\x01\x4d\x11\x87\x89\x19\xda\x90\x0d\xd9\x52\x51\x2a\x4a\x45\x8a\xd4\x00\x4e\x0f\x86\x89\x18\xe1\x36\x44\x4a\x24\x25\xd2\x90\x8d\x01\x4f\x0c\x87\x89\x59\x5b\x25\x75\x4d\xb2\x6c\x02\x50\x0c\x85\x89\x16\x43\x92\x99\x86\x25\x2c\x02\x51\x13\xa8\x69\x59\x63\x16\xa5\x49\x9a\xa4\x49\x1a\x65\xda\x90\x03\x3a\x20\x52\x10\x86\x89\x17\x43\x94\x25\x59\x12\x45\x5b\x54\x4b\x32\x01\x53\x0c\x85\x88\x36\x4b\x98\xaa\xa9\x36\x28\x00\x54\x0a\x87\x88\x17\x87\x2c\xee\x1b\x00\x55\x0a\x86\x89\x18\xa1\x1f\x93\x21\x01\x56\x10\x87\x88\x17\x69\x92\x29\x59\x94\x55\xc2\x24\x5c\x33\x00\x57\x12\x89\x88\x19\x59\xa6\x55\x3a\x25\x4d\x49\x53\xd2\xa6\x88\x59\x04\x58\x0e\x87\x88\x17\x9a\x92\x55\x52\x71\x4c\xb2\x8a\x26\x59\x0d\x87\x88\x17\x9a\x92\x55\xc2\x24\x8d\xdb\x00\x5a\x0b\x86\x88\x17\xc7\x50\x6c\x0d\x87\x01\x5b\x08\xa2\x69\x14\x4b\xbf\x08\x5c\x0b\xa5\x68\x15\x69\x58\x0d\xd3\xb0\x1a\x5d\x08\xa2\x68\x14\x4a\xbf\x0c\x5e\x0a\x76\x98\x77\xa1\x2d\xea\x25\x0c\x5f\x07\x15\x78\x16\x83\x00\x60\x06\x12\xfa\x17\x02\x61\x0d\x65\x89\x36\x4b\x14\x2d\x51\x12\x45\x4a\x00\x62\x0c\x85\x89\x17\x61\x38\x24\x99\xd3\xb0\x00\x63\x0a\x64\x89\x36\x83\x94\x65\xda\x00\x64\x0b\x85\x89\x97\x95\x61\xd2\x6c\xc9\x10\x65\x0b\x65\x88\x36\x4b\x36\x0c\xa1\x38\x04\x66\x0b\x94\x88\x54\x4a\x16\x4d\x59\x27\x00\x67\x0d\x85\x69\x37\xc3\xa4\xd9\x92\x21\x1c\x14\x00\x68\x0a\x85\x89\x17\x61\xc5\xa4\xb9\x05\x69\x07\x81\x89\x13\xc9\x30\x6a\x09\xa3\x68\x53\x69\xd4\xd3\x02\x6b\x0c\x85\x89\x16\x61\x69\x89\x4e\x89\x12\x09\x6c\x07\x81\x89\x13\x07\x01\x6d\x0e\x69\x89\x1a\xc9\x62\xb2\x65\x5a\xa6\x65\x5a\x01\x6e\x09\x65\x89\x17\x89\x49\x73\x0b\x6f\x0d\x66\x88\x37\x43\x22\x8d\xe2\xa4\x0c\x09\x00\x70\x0c\x85\x69\x17\x43\x92\x39\x0d\x4b\x18\x02\x71\x0b\x85\x69\x37\xc3\xa4\xd9\x92\x21\x2c\x72\x09\x63\x89\x15\xc9\x12\x35\x01\x73\x0a\x64\x89\x36\x43\x26\x4e\x43\x02\x74\x0a\x74\x88\x34\xd1\x90\x64\x8d\x02\x75\x09\x65\x89\x1
7\x99\x27\x45\x09\x76\x0c\x66\x88\x16\x61\x12\x35\x0d\x99\x28\x01\x77\x0e\x68\x88\x18\x91\x53\x92\x28\x91\x52\x72\x8a\x00\x78\x0d\x66\x88\x17\x92\xa2\x64\xa2\x16\x25\x92\x00\x79\x0d\x86\x68\x16\x61\x12\xb5\x6d\xa2\x18\x6a\x00\x7a\x0b\x65\x89\x16\x83\x96\x49\x5a\x36\x08\x7b\x0a\xa3\x68\x54\x49\xd4\x22\xb5\x05\x7c\x07\xa1\x69\x13\x0f\x01\x7d\x0b\xa3\x69\x14\x59\x54\x52\xa2\x96\x08\x7e\x09\x36\xa8\x37\x92\x62\x52\x00\xa0\x05\x00\x98\x13\xa1\x07\x81\x69\x13\xc9\x30\xa2\x10\x85\x89\x57\xd9\xa0\x44\x49\x94\x44\x49\x36\x44\x11\x00\xa3\x0b\x84\x8a\x57\x4a\x16\x4d\x59\x69\x08\xa4\x11\x87\x88\x17\x69\x32\x48\x8a\x94\x45\x59\xa4\x48\x83\x92\x06\xa5\x0f\x86\x89\x17\x99\x12\x45\xa3\x36\x44\x43\x96\x66\x00\xa6\x08\xa1\x69\x13\x43\x34\x04\xa7\x0f\xa5\x69\x37\x83\x98\x2e\xd1\x10\x25\xab\x36\x28\x00\xa8\x07\x13\xfa\x17\x49\x00\xa9\x13\x88\x89\x59\x43\x96\x29\x91\xb2\x84\x4a\x28\x29\x4a\xa6\x0d\x11\x00\xaa\x0a\x44\xc8\x15\x43\x32\x44\x83\x00\xab\x0d\x65\x88\x56\x49\xcb\x90\x28\x51\x92\x25\x01\xac\x07\x36\xa8\x17\xd7\x02\xad\x06\x12\xb9\x14\x02\xae\x0a\x54\xb9\x37\xca\xb0\x0c\x8a\x02\xaf\x06\x13\xfa\x17\x03\xb0\x07\x23\xea\x13\x8b\x00\xb1\x0d\x77\x88\x77\x71\x36\x0c\x59\x9c\x63\xc3\x10\xb2\x09\x53\xb2\x14\x53\x32\x24\x03\xb3\x09\x53\xb2\x14\xcb\x90\x0d\x01\xb4\x06\x12\xfb\x17\x02\xb5\x0b\x85\x69\x17\x99\xa7\x41\x11\x43\x00\xb6\x08\xa4\x69\x17\x5f\x26\x3f\xb7\x06\x11\xbb\x17\x01\xb8\x07\x32\x6b\x17\xd1\x00\xb9\x07\x52\xb2\x14\x4a\x07\xba\x0a\x44\xc8\x35\x4a\x24\x25\x0a\x00\xbb\x0d\x65\x88\x16\x49\x96\x44\x89\x32\x24\x9d\x00\xbc\x12\x88\x8b\x19\x9a\x94\x95\xc2\x44\x89\x16\x2d\x29\x45\xc3\x98\x00\xbd\x11\x87\x8b\x19\x9a\x92\x45\xb5\x64\x48\xa6\x92\x12\x29\xd2\x00\xbe\x12\x88\x8b\x19\x93\x29\x92\xd2\x25\x19\x12\x2d\x29\x45\xc3\x98\x00\xbf\x0a\x84\x68\x55\x39\x90\x45\xb5\x70\xc0\x10\x97\x88\x58\x72\x3a\x26\x61\x92\x95\x06\x29\x4b\xd2\x00\xc1\x10\x97\x88\x78\x6a\x3a\x26\x61\x92\x95\x06\x29\x4b\xd2\x00\xc2\x10\x97\x88\x58\x43\x98\x8e\x49\x98\x64\xa5\x41\xca\x92\x34\xc3\x10\x97\x88\x58\x43\x9
8\x8e\x49\x98\x64\xa5\x41\xca\x92\x34\xc4\x10\x97\x88\x58\x49\x9a\x8e\x49\x98\x64\xa5\x41\xca\x92\x34\xc5\x11\xa7\x88\x78\x62\x14\x8a\x63\x12\x26\x59\x69\x90\xb2\x24\x0d\xc6\x13\x8a\x88\x9a\xc3\xa8\x03\x49\x0e\x24\x43\x16\xa5\x83\x9a\x85\xe1\x20\xc7\x0e\xa6\x69\x58\x43\x92\xa9\xcd\x59\x34\x64\xa1\x04\xc8\x0d\x95\x89\x56\xd1\x30\x86\x43\x12\x16\x07\x01\xc9\x0d\x95\x89\x36\xd9\x30\x86\x43\x12\x16\x07\x01\xca\x0d\x95\x89\x36\xd2\x30\x86\x43\x12\x16\x07\x01\xcb\x0d\x95\x89\x36\x95\x61\x0c\x87\x24\x2c\x0e\x02\xcc\x07\x92\x8a\x13\x4a\x7f\xcd\x08\x92\x89\x13\x4b\xbf\x00\xce\x0a\x94\x8a\x13\x43\x92\xf5\x13\x00\xcf\x09\x93\x8a\x13\x49\xa9\x5f\x00\xd0\x12\x88\x88\x38\x83\x16\x46\xe9\xa0\x25\x69\x92\x26\x61\x34\x48\x00\xd1\x10\x96\x89\x38\x43\x12\x6e\x43\xa4\x44\x52\x22\x0d\xd9\x18\xd2\x0d\x97\x89\x79\xe9\x56\x49\x5d\x93\x2c\x9b\x00\xd3\x0d\x97\x89\x79\xe9\x56\x49\x5d\x93\x2c\x9b\x00\xd4\x0c\x97\x89\x79\xb7\x4a\xea\x9a\x64\xd9\x04\xd5\x0e\x97\x89\x59\x43\xb6\x55\x52\xd7\x24\xcb\x26\x00\xd6\x0e\x97\x89\x59\x49\xb8\x55\x52\xd7\x24\xcb\x26\x00\xd7\x0e\x76\x88\x17\x61\x12\x45\x43\xa6\x0d\x51\x25\x0c\xd8\x12\x87\x89\x59\x83\x92\x25\x59\x22\x29\x26\x25\x4b\xb2\x64\x90\x00\xd9\x0b\x96\x89\x58\x52\xe8\xc7\x64\x48\x00\xda\x0b\x96\x89\x58\x52\xe8\xc7\x64\x48\x00\xdb\x0c\x96\x89\x38\x43\x12\xfa\x31\x19\x12\x00\xdc\x0b\x96\x89\x38\xa5\xd0\x8f\xc9\x90\x00\xdd\x0e\x97\x88\x77\x92\xa6\x64\x95\x30\x49\xe3\x36\x00\xde\x0c\x85\x89\x16\xe1\x90\x64\x4e\xc3\x12\x02\xdf\x0f\x95\x89\x17\x53\x25\x4a\x4a\x49\x54\xc9\x2c\x0a\x00\xe0\x0d\x75\x89\x56\xd9\x12\x45\x4b\x94\x44\x91\x12\xe1\x0d\x75\x89\x56\xd9\x12\x45\x4b\x94\x44\x91\x12\xe2\x0d\x75\x89\x16\xdb\x12\x45\x4b\x94\x44\x91\x12\xe3\x0e\x75\x89\x16\x43\xb4\x44\xd1\x12\x25\x51\xa4\x04\xe4\x0d\x75\x89\x36\xa5\x25\x8a\x96\x28\x89\x22\x25\xe5\x0e\x85\x89\x36\x52\x69\x89\xa2\x25\x4a\xa2\x48\x09\xe6\x0e\x67\x89\x39\x96\xa8\x32\x0c\x51\x16\x49\xcb\x00\xe7\x0c\x84\x69\x36\x83\x94\x65\xda\x14\x29\x00\xe8\x0c\x75\x88\x56\xd9\x92\x0d\x43\x28\x0e\x01\xe9\x0c\x7
5\x88\x56\xd9\x92\x0d\x43\x28\x0e\x01\xea\x0c\x75\x88\x56\xcb\x92\x0d\x43\x28\x0e\x01\xeb\x0c\x75\x88\x36\xa5\x25\x1b\x86\x50\x1c\x02\xec\x07\x72\x8a\x13\x4a\x1f\xed\x08\x72\x89\x13\x4b\x2f\x00\xee\x09\x74\x8a\x13\x43\x92\xf5\x04\xef\x09\x73\x8a\x13\x49\xa9\x17\x00\xf0\x0d\x85\x89\x17\x5b\x12\x2d\x99\x6d\x51\x16\x00\xf1\x0b\x75\x89\x17\x43\x92\x98\x34\xb7\x00\xf2\x0d\x76\x88\x77\xd9\x90\x48\xa3\x38\x29\x43\x02\xf3\x0d\x76\x88\x57\xe1\x90\x48\xa3\x38\x29\x43\x02\xf4\x0d\x76\x88\x57\xda\x90\x48\xa3\x38\x29\x43\x02\xf5\x0e\x76\x88\x37\x43\x34\x24\xd2\x28\x4e\xca\x90\x00\xf6\x0d\x76\x88\x37\xb5\x21\x91\x46\x71\x52\x86\x04\xf7\x0b\x67\x88\x77\x39\x6d\x18\x72\x2c\x03\xf8\x0d\x66\x88\x37\xc3\x10\x4d\x89\x12\x1d\x86\x04\xf9\x0a\x75\x89\x37\x52\xe6\x49\x51\x02\xfa\x0a\x75\x89\x57\x4a\xe6\x49\x51\x02\xfb\x0a\x75\x89\x37\x83\xe6\x49\x51\x02\xfc\x09\x75\x89\x37\x6d\x9e\x14\x25\xfd\x0e\x96\x68\x56\x52\x98\x44\x6d\x9b\x28\x86\x1a\x00\xfe\x0e\xa5\x69\x17\x61\x38\x24\x99\xd3\xb0\x84\x21\x00\xff\x0e\x96\x68\x36\xa5\x30\x89\xda\x36\x51\x0c\x35\x00\x00\x00\x00\x08\x01\x64\x05\x89\xff\xff\x01\x00\x11\xa7\x88\x58\x3b\x9a\x8e\x49\x98\x64\xa5\x41\xca\x92\x34\x01\x01\x0f\x85\x89\x36\x3b\xb0\x44\xd1\x12\x25\x51\xa4\x04\x01\x02\x12\xa7\x88\x58\x43\x0e\xa6\x63\x12\x26\x59\x69\x90\xb2\x24\x0d\x01\x03\x10\x85\x89\x16\x43\x0e\x2c\x51\xb4\x44\x49\x14\x29\x01\x01\x04\x12\xa7\x68\x78\xe9\x98\x84\x49\x56\x1a\xa4\x2c\x49\x73\x4c\x02\x01\x05\x10\x85\x69\x36\x4b\x14\x2d\x51\x12\x45\x4a\x0e\x28\x00\x01\x06\x0f\xa6\x89\x58\x3a\x36\x24\x99\xda\x9c\x45\x43\x00\x01\x07\x0b\x84\x89\x36\xf2\x20\x65\x99\x36\x01\x08\x0f\xa6\x89\x38\x43\x0e\x0d\x49\xa6\x36\x67\xd1\x10\x01\x09\x0c\x84\x89\x16\x43\x3a\x48\x59\xa6\x0d\x01\x0a\x0f\xa6\x89\x78\x39\x36\x24\x99\xda\x9c\x45\x43\x00\x01\x0b\x0c\x84\x89\x36\x39\x30\x48\x59\xa6\x0d\x01\x0c\x0f\xa6\x89\x38\x43\x0e\x0d\x49\xa6\x36\x67\xd1\x10\x01\x0d\x0c\x84\x89\x16\x43\x3a\x48\x59\xa6\x0d\x01\x0e\x10\xa7\x89\x38\x43\x0e\x0d\x52\x98\xa4\x1e\x93\x41\x02\x01\x0f\x10\x88\x89\x9
a\x89\x18\x25\x43\x32\x44\x59\x1f\x87\x0c\x01\x10\x13\x88\x88\x38\x83\x16\x46\xe9\xa0\x25\x69\x92\x26\x61\x34\x48\x00\x01\x11\x0e\x85\x89\x97\xc9\x90\x0c\x93\x66\x4b\x86\x00\x01\x12\x0e\xa5\x89\x36\xf3\x30\x86\x43\x12\x16\x07\x01\x01\x13\x0e\x85\x88\x36\x3b\xb0\x64\xc3\x10\x8a\x43\x00\x01\x14\x0e\xa5\x89\x16\x43\x3c\x8c\xe1\x90\x84\xc5\x41\x01\x15\x0e\x85\x88\x36\x43\xbc\x64\xc3\x10\x8a\x43\x00\x01\x16\x0e\xa5\x89\x56\x39\x70\x0c\x87\x24\x2c\x0e\x02\x01\x17\x0e\x85\x88\x56\x39\xb2\x64\xc3\x10\x8a\x43\x00\x01\x18\x0e\xa5\x69\x16\xc7\x70\x48\xc2\xe2\x20\x4b\x00\x01\x19\x0e\x85\x68\x36\x4b\x36\x0c\xa1\x38\xc4\x12\x00\x01\x1a\x0e\xa5\x89\x16\x43\x3c\x8c\xe1\x90\x84\xc5\x41\x01\x1b\x0e\x85\x88\x36\x43\xbc\x64\xc3\x10\x8a\x43\x00\x01\x1c\x11\xa6\x89\x38\x43\x0e\x0d\x49\xa6\x16\xc5\x24\x8b\x86\x00\x01\x1d\x0f\xa5\x69\x37\x43\x3c\x4c\x9a\x2d\x19\xc2\x41\x01\x01\x1e\x11\xa6\x89\x38\x43\x0e\x0d\x49\xa6\x16\xc5\x24\x8b\x86\x00\x01\x1f\x0f\xa5\x69\x37\x43\x3c\x4c\x9a\x2d\x19\xc2\x41\x01\x01\x20\x10\xa6\x89\x78\x39\x36\x24\x99\x5a\x14\x93\x2c\x1a\x02\x01\x21\x0f\xa5\x69\x57\x39\x32\x4c\x9a\x2d\x19\xc2\x41\x01\x01\x22\x13\xd6\x39\x58\x43\x92\xa9\x45\x31\xc9\xa2\x21\x47\x44\x35\xcc\x00\x01\x23\x10\xc5\x69\x57\x5a\xa8\x03\x27\xcd\x96\x0c\xe1\xa0\x00\x01\x24\x0e\xa6\x89\x38\x43\x0e\x84\xc6\x61\x10\x1d\x03\x01\x25\x0d\xa5\x89\x17\x43\x1c\x56\x4c\x9a\x5b\x00\x01\x26\x0d\x86\x89\x18\xe1\x30\x88\xc3\x20\x3a\x06\x01\x27\x11\x86\x88\x37\xe1\x90\x25\x92\x94\x64\x49\x96\x64\x49\x16\x01\x28\x0b\xa4\x88\x13\x43\x9a\xf5\x13\x00\x01\x29\x0a\x84\x88\x13\x43\x9a\xf5\x04\x01\x2a\x0a\xa3\x88\x13\x63\xd4\x2f\x00\x01\x2b\x0a\x83\x88\x13\x63\xd4\x0b\x00\x01\x2c\x0b\xa4\x88\x13\x43\x9a\xf5\x13\x00\x01\x2d\x0a\x84\x88\x13\x43\x9a\xf5\x04\x01\x2e\x09\xa2\x69\x13\x49\xbf\x09\x01\x2f\x0a\xa2\x69\x13\x59\xd2\x9b\x00\x01\x30\x09\xa1\x89\x13\xc9\x30\x08\x01\x31\x07\x61\x89\x13\x07\x01\x32\x0b\xa5\x69\x16\x99\xbf\x85\x91\x02\x01\x33\x0d\xa5\x69\x16\x59\x9a\xf9\x16\x46\x0a\x00\x01\x34\x0b\xc4\x68\x13\x43\x9c\xf
5\x17\x09\x01\x35\x0b\xa4\x68\x13\x43\x9c\xf5\x45\x02\x01\x36\x13\xd6\x39\x17\xd9\x54\xc9\x6e\x89\x14\x29\x99\x8e\x88\x6a\x98\x01\x01\x37\x11\xd5\x39\x16\x61\x69\x89\x4e\x89\x12\xc9\x9a\x98\x65\x00\x01\x38\x0c\x65\x89\x16\xd1\x12\x9d\x12\x25\x12\x01\x39\x0b\xa5\x89\x16\x3a\x12\xf6\x71\x10\x01\x3a\x0a\xa2\x89\x13\x52\xd2\x2f\x00\x01\x3b\x0d\xd5\x39\x16\x61\x1f\x07\x59\x13\xb3\x0c\x01\x3c\x0b\xd2\x39\x13\x49\xbf\x0d\x89\x02\x01\x3d\x0c\x87\x89\x19\xe1\x2a\xce\xcd\x83\x04\x01\x3e\x0b\x84\x89\x16\xc9\xa4\x6c\xdd\x00\x01\x3f\x13\x89\x89\x1d\x39\x92\x23\x39\x92\x23\x39\xa0\x23\x39\x32\x88\x00\x01\x40\x0a\x86\x89\x1a\x69\x47\xb5\x0a\x01\x41\x0c\x86\x88\x36\x69\x2a\x6a\x6a\x75\x10\x01\x42\x0c\x83\x88\x33\x51\xa4\x24\x52\x0b\x00\x01\x43\x11\xa6\x89\x58\x3a\x12\x6e\x43\xa4\x44\x52\x22\x0d\xd9\x18\x01\x44\x0c\x85\x89\x37\x3a\x90\x98\x34\xb7\x00\x01\x45\x14\xd6\x39\x18\xe1\x36\x44\x4a\x24\x25\xd2\x90\x8d\x39\x22\xaa\x61\x06\x01\x46\x0f\xb5\x39\x17\x89\x49\x73\xcb\x01\x4d\xcc\x22\x00\x01\x47\x12\xa6\x89\x38\x43\x0e\x84\xdb\x10\x29\x91\x94\x48\x43\x36\x06\x01\x48\x0c\x85\x89\x17\x43\x9c\x98\x34\xb7\x00\x01\x49\x13\x89\x88\x1a\x3a\x92\x03\x52\xa2\x4a\x61\x16\x66\x61\x16\x66\x01\x01\x4a\x12\xa6\x69\x18\xe1\x36\x44\x4a\x24\x25\xd2\x90\x8d\x69\xa6\x00\x01\x4b\x0c\x85\x69\x17\x89\x49\x73\x0b\x23\x05\x01\x4c\x0f\xa7\x89\x59\x3b\xb8\x55\x52\xd7\x24\xcb\x26\x00\x01\x4d\x0f\x86\x88\x57\x3b\x32\x24\xd2\x28\x4e\xca\x90\x00\x01\x4e\x0f\xa7\x89\x59\x43\x8e\x6d\x95\xd4\x35\xc9\xb2\x09\x01\x4f\x10\x86\x88\x37\x43\x8e\x0c\x89\x34\x8a\x93\x32\x24\x00\x01\x50\x0f\xa7\x89\x59\x3b\xb8\x55\x52\xd7\x24\xcb\x26\x00\x01\x51\x0f\x86\x88\x57\x3b\x32\x24\xd2\x28\x4e\xca\x90\x00\x01\x52\x12\x8b\x88\x5b\xc3\x25\xac\x86\xe9\x90\xa4\x61\x5a\x8c\x87\x03\x01\x53\x0f\x69\x89\x3a\x17\x29\xd3\x86\x2d\x94\xc4\x65\x08\x01\x54\x12\xa6\x89\x57\x3a\x32\x44\x59\x92\x25\x51\xb4\x45\xb5\x24\x13\x01\x55\x0b\x83\x89\x15\x62\xb2\x44\x4d\x00\x01\x56\x15\xd6\x39\x17\x43\x94\x25\x59\x12\x45\x5b\x54\x4b\x32\x1d\x11\xd5\x30\x03\x0
1\x57\x0d\xb3\x39\x15\xc9\x12\xb5\x2a\x52\x12\x01\x01\x58\x13\xa6\x89\x37\x43\x0e\x0c\x51\x96\x64\x49\x14\x6d\x51\x2d\xc9\x04\x01\x59\x0c\x84\x88\x15\x43\x9a\x24\x52\xd6\x04\x01\x5a\x0e\xa5\x88\x36\x3a\xb2\x84\xa9\x9a\x6a\x83\x02\x01\x5b\x0c\x84\x89\x36\xf2\x90\x89\xd3\x90\x00\x01\x5c\x0f\xa5\x88\x16\x43\x0e\x2c\x61\xaa\xa6\xda\xa0\x00\x01\x5d\x0d\x84\x89\x16\x43\x3a\x64\xe2\x34\x24\x00\x01\x5e\x0e\xa5\x68\x36\x4b\x98\xaa\xa9\x36\x88\x99\x02\x01\x5f\x0d\x84\x69\x36\x43\x26\x4e\x43\x16\x29\x00\x01\x60\x0e\x95\x88\x56\xcb\x12\xa6\x6a\xaa\x0d\x0a\x00\x01\x61\x0c\x74\x89\x16\xd3\x90\x89\xd3\x90\x00\x01\x62\x0d\xa7\x68\x17\x87\x2c\xee\x3b\x90\x4a\x00\x01\x63\x0c\x94\x68\x34\xd1\x90\x64\x8d\x5a\x24\x01\x64\x0d\xa7\x88\x57\x43\x8e\x0c\x43\x16\xf7\x0d\x01\x65\x0d\x86\x88\x97\x4a\x36\x0c\x49\xda\x2c\x01\x01\x66\x0d\x87\x88\x17\x87\x2c\x4e\x87\x30\x6e\x03\x01\x67\x0d\x74\x88\x34\xd1\x90\x44\x43\x92\x85\x02\x01\x68\x0d\xa6\x89\x38\x43\x0e\x84\x7e\x4c\x86\x04\x01\x69\x0c\x85\x89\x37\x43\x9a\x79\x52\x94\x00\x01\x6a\x0d\xa6\x89\x58\x3b\x10\xfa\x31\x19\x12\x00\x01\x6b\x0b\x85\x89\x37\x73\xe6\x49\x51\x02\x01\x6c\x0d\xa6\x89\x38\x43\x0e\x84\x7e\x4c\x86\x04\x01\x6d\x0c\x85\x89\x37\x43\x9a\x79\x52\x94\x00\x01\x6e\x0f\xc6\x89\x58\x5a\x94\xe9\x48\xe8\xc7\x64\x48\x00\x01\x6f\x0d\xa5\x89\x57\x52\x49\xce\x3c\x29\x4a\x00\x01\x70\x0d\xa6\x89\x58\x3b\x10\xfa\x31\x19\x12\x00\x01\x71\x0b\x85\x89\x37\x73\xe6\x49\x51\x02\x01\x72\x0d\xa6\x69\x18\xa1\x1f\x93\x21\x87\x24\x00\x01\x73\x0c\x85\x69\x17\x99\x27\x45\xc9\x01\x05\x01\x74\x16\xa9\x88\x79\x43\x0e\x66\x99\x56\xe9\x94\x34\x25\x4d\x49\x9b\x22\x66\x11\x00\x01\x75\x11\x88\x88\x58\x43\x8e\x45\x4e\x49\xa2\x44\x4a\xc9\x29\x02\x01\x76\x10\xa7\x88\x57\x43\x8e\x68\x4a\x56\x09\x93\x34\x6e\x03\x01\x77\x10\xa6\x68\x36\x43\x0e\x84\x49\xd4\xb6\x89\x62\xa8\x01\x01\x78\x0f\x97\x88\x57\x49\xa4\x29\x59\x25\x4c\xd2\xb8\x0d\x01\x79\x0e\xa6\x88\x57\x3a\x32\x8c\xa1\xd8\x1a\x0e\x03\x01\x7a\x0d\x85\x89\x56\xf2\xa0\x65\x92\x96\x0d\x02\x01\x7b\x0e\xa6\x88\x77\x39\x32\x8
c\xa1\xd8\x1a\x0e\x03\x01\x7c\x0e\x85\x89\x56\x39\x30\x68\x99\xa4\x65\x83\x00\x01\x7d\x0d\x96\x88\x57\xd2\x30\x86\x62\x6b\x38\x0c\x01\x7e\x0d\x75\x89\x56\xca\xa0\x65\x92\x96\x0d\x02\x01\x86\x0e\x86\x89\x18\x43\x94\xc5\x69\x55\x4b\x86\x08\x01\x89\x13\x88\x88\x38\x83\x16\x46\xe9\xa0\x25\x69\x92\x26\x61\x34\x48\x00\x01\x8e\x0c\x85\x89\x16\x83\xd8\x32\x84\xe1\x30\x01\x92\x0e\xa5\x69\x57\x53\x16\x66\x5b\x98\x69\x61\x08\x01\x97\x0c\x84\x88\x33\x59\x69\x48\xb2\x26\x00\x01\x9a\x0c\x84\x88\x33\x59\x69\x48\xb2\x26\x00\x01\x9d\x14\xa8\x67\x58\x61\xa4\x45\x53\x25\x6a\x89\xa2\x29\x93\xc2\x28\x95\x01\x01\x9f\x0f\x87\x89\x59\x5b\x25\x1d\xae\x6a\x92\x65\x13\x00\x01\xa0\x11\x89\x89\x59\x93\x92\x45\xea\x90\x46\x69\x94\x66\xd5\x11\x01\xa1\x10\x68\x88\x37\x43\x32\x44\x8a\x38\x84\x91\x6d\xc8\x00\x01\xa7\x0e\x85\x88\x36\x6b\x18\x49\x59\x98\x25\x43\x00\x01\xa8\x0b\x64\x89\x16\x63\x34\x44\x95\x01\x01\xae\x0c\xa7\x68\x17\x87\x2c\xee\x77\x40\x01\x01\xaf\x13\x88\x89\x18\xe1\x10\x26\xe2\x10\x46\x61\x14\x46\x61\x36\x64\x00\x01\xb0\x10\x67\x89\x17\xd9\x90\x25\xda\x90\x45\x91\xa6\x44\x00\x01\xb5\x0e\x86\x88\x17\xc7\x50\x1a\xb2\x30\x0d\x87\x01\x01\xb6\x0d\x65\x89\x16\x83\x16\x0d\x89\x96\x0d\x02\x01\xbb\x0c\x85\x89\x37\x4b\x16\x0e\x93\xdb\x20\x01\xbc\x0d\x84\x89\x17\x83\x96\x8d\x59\x36\x24\x00\x01\xc0\x08\xa1\x69\x13\x0f\x01\x01\xc2\x0e\xa6\x78\x77\x69\xd3\xb0\x45\xc3\x96\x96\x00\x01\xc3\x08\x81\x89\x13\x97\x00\x01\xcd\x12\xa7\x88\x58\x43\x0e\xa6\x63\x12\x26\x59\x69\x90\xb2\x24\x0d\x01\xce\x10\x85\x89\x16\x43\x0e\x2c\x51\xb4\x44\x49\x14\x29\x01\x01\xcf\x0b\xa4\x88\x13\x43\x9a\xf5\x13\x00\x01\xd0\x0a\x84\x88\x13\x43\x9a\xf5\x04\x01\xd1\x0f\xa7\x89\x59\x43\x8e\x6d\x95\xd4\x35\xc9\xb2\x09\x01\xd2\x10\x86\x88\x37\x43\x8e\x0c\x89\x34\x8a\x93\x32\x24\x00\x01\xd3\x0d\xa6\x89\x38\x43\x0e\x84\x7e\x4c\x86\x04\x01\xd4\x0c\x85\x89\x37\x43\x9a\x79\x52\x94\x00\x01\xd5\x0e\xb6\x89\x38\x3b\x94\x44\xa1\x1f\x93\x21\x01\x01\xd6\x0d\x95\x89\x37\x3b\x90\xd4\x3c\x29\x4a\x00\x01\xd7\x0e\xb6\x89\x58\x3a\x94\x44\xa1\x1f\x9
3\x21\x01\x01\xd8\x0d\x95\x89\x57\x3a\x90\xd4\x3c\x29\x4a\x00\x01\xd9\x0e\xb6\x89\x38\x43\x8e\x94\x42\x3f\x26\x43\x02\x01\xda\x0d\x95\x89\x37\x43\x9c\xd4\x3c\x29\x4a\x00\x01\xdb\x0e\xb6\x89\x58\x3a\x94\x44\xa1\x1f\x93\x21\x01\x01\xdc\x0d\x95\x89\x57\x3a\x90\xd4\x3c\x29\x4a\x00\x01\xdd\x0d\x65\x88\x16\x43\x28\x0e\x43\x96\x2c\x00\x01\xde\x13\xb7\x88\x58\x3b\x98\xa4\xe9\x98\x84\x49\x56\x1a\xa4\x2c\x49\x03\x01\xdf\x11\x95\x89\x36\x3b\x90\x44\x4b\x14\x2d\x51\x12\x45\x4a\x00\x01\xe0\x13\xc7\x88\x58\x3b\x9a\xc3\xe9\x98\x84\x49\x56\x1a\xa4\x2c\x49\x03\x01\xe1\x11\xa5\x89\x36\x3b\x92\x23\x4b\x14\x2d\x51\x12\x45\x4a\x00\x01\xe2\x16\xaa\x88\x9a\x3b\x6d\x18\x75\x20\xc9\x81\x64\xc8\xa2\x74\x50\xb3\x30\x1c\x04\x01\xe3\x11\x87\x89\x59\x3b\xa6\x28\x51\x65\x18\xa2\x2c\x92\x96\x01\x01\xe4\x0f\x86\x89\x58\x43\x92\xa9\x45\x65\x48\xb2\x68\x08\x01\xe5\x0d\x85\x69\x37\xc3\xa4\xd9\x86\x71\x50\x00\x01\xe6\x11\xa6\x89\x38\x43\x0e\x0d\x49\xa6\x16\xc5\x24\x8b\x86\x00\x01\xe7\x0f\xa5\x69\x37\x43\x3c\x4c\x9a\x2d\x19\xc2\x41\x01\x01\xe8\x11\xa6\x89\x37\x43\x0e\x64\x53\x25\xbb\x25\x52\xa4\x64\x02\x01\xe9\x0f\xa5\x89\x16\x43\x1c\x96\x96\xe8\x94\x28\x91\x00\x01\xea\x0f\xa7\x69\x59\x5b\x25\x75\x4d\xb2\x6c\x47\x25\x00\x01\xeb\x0f\x86\x68\x37\x43\x22\x8d\xe2\xa4\x0c\x39\x24\x01\x01\xec\x10\xc7\x69\x59\x3b\xb8\x55\x52\xd7\x24\xcb\x76\x54\x02\x01\xed\x11\xa6\x68\x57\x3b\x32\x24\xd2\x28\x4e\xca\x90\x43\x12\x00\x01\xf0\x0b\xa4\x68\x13\x43\x9c\xf5\x45\x02\x01\xf4\x10\xa6\x89\x58\x3a\x36\x24\x99\x5a\x14\x93\x2c\x1a\x02\x01\xf5\x0f\xa5\x69\x57\x3a\x70\xd2\x6c\xc9\x10\x0e\x0a\x00\x01\xf8\x11\xa6\x89\x58\x3a\x12\x6e\x43\xa4\x44\x52\x22\x0d\xd9\x18\x01\xf9\x0c\x85\x89\x37\x3a\x90\x98\x34\xb7\x00\x01\xfa\x14\xc7\x88\x78\x3a\x2a\x46\xa1\x38\x26\x61\x92\x95\x06\x29\x4b\xd2\x00\x01\xfb\x11\xa5\x89\x36\x3a\x22\x95\x96\x28\x5a\xa2\x24\x8a\x94\x00\x01\xfc\x16\xaa\x88\xba\x3a\x6d\x18\x75\x20\xc9\x81\x64\xc8\xa2\x74\x50\xb3\x30\x1c\x04\x01\xfd\x11\x87\x89\x79\x3a\xa6\x28\x51\x65\x18\xa2\x2c\x92\x96\x01\x01\xfe\x14\xa7\x8
9\x79\x3a\x38\x28\x59\x92\x25\x92\x62\x52\xb2\x24\x4b\x06\x09\x01\xff\x10\x86\x88\x57\x3a\x34\x0c\xd1\x94\x28\xd1\x61\x48\x00\x02\x00\x11\xa7\x88\x58\x3b\x9a\x8e\x49\x98\x64\xa5\x41\xca\x92\x34\x02\x01\x0f\x85\x89\x36\x3b\xb0\x44\xd1\x12\x25\x51\xa4\x04\x02\x02\x12\xa7\x88\x58\x43\x0e\xa6\x63\x12\x26\x59\x69\x90\xb2\x24\x0d\x02\x03\x10\x85\x89\x16\x43\x0e\x2c\x51\xb4\x44\x49\x14\x29\x01\x02\x04\x0e\xa5\x89\x36\xf3\x30\x86\x43\x12\x16\x07\x01\x02\x05\x0e\x85\x88\x36\x3b\xb0\x64\xc3\x10\x8a\x43\x00\x02\x06\x0e\xa5\x89\x16\x43\x3c\x8c\xe1\x90\x84\xc5\x41\x02\x07\x0e\x85\x88\x36\x43\xbc\x64\xc3\x10\x8a\x43\x00\x02\x08\x0a\xa3\x88\x13\x63\xd4\x2f\x00\x02\x09\x0a\x83\x88\x13\x63\xd4\x0b\x00\x02\x0a\x0b\xa4\x88\x13\x43\x9a\xf5\x13\x00\x02\x0b\x0a\x84\x88\x13\x43\x9a\xf5\x04\x02\x0c\x0f\xa7\x89\x59\x3b\xb8\x55\x52\xd7\x24\xcb\x26\x00\x02\x0d\x0f\x86\x88\x57\x3b\x32\x24\xd2\x28\x4e\xca\x90\x00\x02\x0e\x0f\xa7\x89\x59\x43\x8e\x6d\x95\xd4\x35\xc9\xb2\x09\x02\x0f\x10\x86\x88\x37\x43\x8e\x0c\x89\x34\x8a\x93\x32\x24\x00\x02\x10\x12\xa6\x89\x37\x3b\x32\x44\x59\x92\x25\x51\xb4\x45\xb5\x24\x13\x02\x11\x0b\x83\x89\x15\x5b\xb2\x44\x4d\x00\x02\x12\x13\xa6\x89\x37\x43\x0e\x0c\x51\x96\x64\x49\x14\x6d\x51\x2d\xc9\x04\x02\x13\x0c\x84\x88\x15\x43\x9a\x24\x52\xd6\x04\x02\x14\x0d\xa6\x89\x58\x3b\x10\xfa\x31\x19\x12\x00\x02\x15\x0b\x85\x89\x37\x73\xe6\x49\x51\x02\x02\x16\x0d\xa6\x89\x38\x43\x0e\x84\x7e\x4c\x86\x04\x02\x17\x0c\x85\x89\x37\x43\x9a\x79\x52\x94\x00\x02\x18\x11\xd5\x38\x36\x4b\x98\xaa\xa9\x36\xe8\x88\x26\x66\x11\x00\x02\x19\x0e\xb4\x39\x36\x43\x26\x4e\x43\x2c\x69\x51\x04\x02\x1a\x0e\xd7\x38\x17\x87\x2c\xee\x3b\xac\xca\x69\x06\x02\x1b\x0f\xc4\x38\x34\xd1\x90\x64\x8d\xaa\xa4\x45\x11\x00\x02\x1e\x0e\xa6\x89\x38\x43\x0e\x84\xc6\x61\x10\x1d\x03\x02\x1f\x0d\xa5\x89\x17\x43\x1c\x56\x4c\x9a\x5b\x00\x02\x26\x11\xa7\x88\x78\x39\x9c\x8e\x49\x98\x64\xa5\x41\xca\x92\x34\x02\x27\x0f\x85\x89\x56\x39\xb2\x44\xd1\x12\x25\x51\xa4\x04\x02\x28\x0f\xa5\x69\x16\xc7\x70\x48\xc2\xe2\x20\x65\x12\x00\x02\x2
9\x0e\x85\x68\x36\x4b\x36\x0c\xa1\x38\x64\x99\x02\x02\x2a\x10\xb7\x89\x59\x3b\x98\x84\x5b\x25\x75\x4d\xb2\x6c\x02\x02\x2b\x11\x96\x88\x37\x3b\x94\x64\x43\x22\x8d\xe2\xa4\x0c\x09\x00\x02\x2c\x10\xb7\x89\x59\x3b\x38\x64\x5b\x25\x75\x4d\xb2\x6c\x02\x02\x2d\x11\x96\x88\x57\x3b\x32\x44\x43\x22\x8d\xe2\xa4\x0c\x09\x00\x02\x2e\x0f\xa7\x89\x79\x39\xba\x55\x52\xd7\x24\xcb\x26\x00\x02\x2f\x0f\x86\x88\x77\x39\x34\x24\xd2\x28\x4e\xca\x90\x00\x02\x30\x10\xc7\x89\x59\x3b\x9a\xa3\x5b\x25\x75\x4d\xb2\x6c\x02\x02\x31\x11\xa6\x88\x57\x3b\x96\x43\x43\x22\x8d\xe2\xa4\x0c\x09\x00\x02\x32\x0f\xa7\x88\x57\x3b\xa4\x29\x59\x25\x4c\xd2\xb8\x0d\x02\x33\x0f\xa6\x68\x56\x3b\x10\x26\x51\xdb\x26\x8a\xa1\x06\x02\x50\x0d\x65\x89\x16\x89\x54\x89\x92\xa9\xb2\x00\x02\x54\x0b\x64\x89\x16\x9b\x56\x1a\x14\x00\x02\x58\x0c\x65\x88\x36\x4b\x36\x8c\xd9\xb0\x00\x02\x59\x0d\x65\x88\x16\x43\x28\x0e\x43\x96\x2c\x00\x02\x5f\x0c\x94\x58\x54\x59\xa7\x29\x4b\x24\x00\x02\x65\x0b\x85\x69\x17\x99\x27\x45\x09\x0b\x02\x75\x0e\x66\x88\x37\x43\x22\x0d\xc7\x49\x19\x12\x00\x02\x79\x09\x63\x89\x55\x5d\x96\x00\x02\x87\x0c\x74\x88\x14\x62\xd6\x32\x44\x09\x00\x02\x88\x0b\x94\x68\x34\xd1\x90\x64\x3d\x0a\x02\x89\x0d\x65\x89\x17\x99\x36\x0c\x99\xa4\x28\x01\x02\x8c\x0c\x66\x88\x56\xa2\x36\x44\x5d\xc2\x00\x02\x8d\x0e\x68\x88\x58\x91\x53\x92\x48\x89\x52\x72\x0a\x02\x8e\x0c\x86\x88\x76\x62\x68\xdb\xa2\x96\x30\x02\x9e\x0d\x85\x69\x16\x52\xa2\x44\xa7\x64\x0a\x0b\x02\xbb\x08\x32\xd8\x13\x8b\x00\x02\xbc\x08\x32\xd8\x13\xca\x00\x02\xbd\x08\x32\xd8\x13\x8b\x00\x00\x00'
| 11,728.5 | 23,456 | 0.749925 | 5,864 | 23,457 | 2.999829 | 0.043827 | 0.02831 | 0.008186 | 0.008868 | 0.425672 | 0.380138 | 0.354215 | 0.320277 | 0.285316 | 0.250014 | 0 | 0.408851 | 0.000043 | 23,457 | 1 | 23,457 | 23,457 | 0.341107 | 0 | 0 | 0 | 0 | 1 | 0.999616 | 0.999616 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
2740395afa081278cc0e7f078baa087907701fee | 1,449 | py | Python | jans-pycloudlib/jans/pycloudlib/persistence/__init__.py | JanssenProject/jans | 8d57d01b998bfe87a2377bbe9023dd97fb03cc9f | [
"Apache-2.0"
] | 18 | 2022-01-13T13:45:13.000Z | 2022-03-30T04:41:18.000Z | jans-pycloudlib/jans/pycloudlib/persistence/__init__.py | JanssenProject/jans | 8d57d01b998bfe87a2377bbe9023dd97fb03cc9f | [
"Apache-2.0"
] | 604 | 2022-01-13T12:32:50.000Z | 2022-03-31T20:27:36.000Z | jans-pycloudlib/jans/pycloudlib/persistence/__init__.py | JanssenProject/jans | 8d57d01b998bfe87a2377bbe9023dd97fb03cc9f | [
"Apache-2.0"
] | 8 | 2022-01-28T00:23:25.000Z | 2022-03-16T05:12:12.000Z | # noqa: D104
from jans.pycloudlib.persistence.couchbase import render_couchbase_properties # noqa: F401
from jans.pycloudlib.persistence.couchbase import sync_couchbase_truststore # noqa: F401
from jans.pycloudlib.persistence.couchbase import id_from_dn # noqa: F401
from jans.pycloudlib.persistence.couchbase import CouchbaseClient # noqa: F401
from jans.pycloudlib.persistence.hybrid import render_hybrid_properties # noqa: F401
from jans.pycloudlib.persistence.ldap import render_ldap_properties # noqa: F401
from jans.pycloudlib.persistence.ldap import sync_ldap_truststore # noqa: F401
from jans.pycloudlib.persistence.ldap import LdapClient # noqa: F401
from jans.pycloudlib.persistence.sql import render_sql_properties # noqa: F401
from jans.pycloudlib.persistence.sql import doc_id_from_dn # noqa: F401
from jans.pycloudlib.persistence.sql import SqlClient # noqa: F401
from jans.pycloudlib.persistence.spanner import render_spanner_properties # noqa: F401
from jans.pycloudlib.persistence.spanner import SpannerClient # noqa: F401
from jans.pycloudlib.persistence.utils import PersistenceMapper # noqa: F401
from jans.pycloudlib.persistence.utils import PERSISTENCE_TYPES # noqa: F401
from jans.pycloudlib.persistence.utils import PERSISTENCE_SQL_DIALECTS # noqa: F401
from jans.pycloudlib.persistence.utils import render_salt # noqa: F401
from jans.pycloudlib.persistence.utils import render_base_properties # noqa: F401
| 72.45 | 91 | 0.835059 | 189 | 1,449 | 6.269841 | 0.15873 | 0.121519 | 0.273418 | 0.440506 | 0.823629 | 0.823629 | 0.755274 | 0.71308 | 0.362869 | 0 | 0 | 0.043779 | 0.101449 | 1,449 | 19 | 92 | 76.263158 | 0.866359 | 0.143547 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
279b063189daf2f671e739472595eb6738cd2837 | 20,144 | py | Python | epipack/process_conversions.py | PaPeK/epipack | 52fb26ba35672fbce1f2f598eac2ed71e6fcb90f | [
"MIT"
] | 25 | 2020-10-22T10:59:10.000Z | 2022-01-05T02:24:21.000Z | epipack/process_conversions.py | PaPeK/epipack | 52fb26ba35672fbce1f2f598eac2ed71e6fcb90f | [
"MIT"
] | 11 | 2021-03-06T10:18:14.000Z | 2021-09-06T21:34:46.000Z | epipack/process_conversions.py | PaPeK/epipack | 52fb26ba35672fbce1f2f598eac2ed71e6fcb90f | [
"MIT"
] | 2 | 2021-06-17T14:26:03.000Z | 2021-08-13T02:11:00.000Z |
def processes_to_rates(process_list, compartments, ignore_rate_position_checks=False):
    """
    Convert a list of reaction process tuples to rate tuples.

    Parameters
    ----------
    process_list : list of tuple
        A list containing reaction processes in terms of tuples.

        .. code:: python

            [
                # transition process
                ( source_compartment, rate, target_compartment),
                # transmission process
                ( coupling_compartment_0, coupling_compartment_1, rate, target_compartment_0, target_compartment_1),
                # fission process
                ( source_compartment, rate, target_compartment_0, target_compartment_1),
                # fusion process
                ( source_compartment_0, source_compartment_1, rate, target_compartment),
                # death process
                ( source_compartment, rate, None),
                # birth process
                ( None, rate, target_compartment),
            ]

    compartments : list of hashable type
        The compartments of these reaction equations.
    ignore_rate_position_checks : bool, default = False
        Skip the check that the rate sits at the expected position in each
        tuple for transition, birth, death, and transmission processes.
        (Useful if you want to define symbolic transmission processes that
        are compartment-dependent.)

    Returns
    -------
    quadratic_rates : list of tuple
        Rate tuples for quadratic terms.
    linear_rates : list of tuple
        Rate tuples for linear terms.

    Raises
    ------
    TypeError
        If a process tuple cannot be classified unambiguously.
    """
    quadratic_rates = []
    linear_rates = []
    for process in process_list:
        length = len(process)
        if length == 3:
            # transition, birth, or death process (rate at position 1)
            if not (ignore_rate_position_checks or process[1] not in compartments):
                raise TypeError("Process " + str(tuple(process)) + " is not understood.")
            linear_rates.extend(transition_processes_to_rates([process]))
        elif length == 4:
            # fission (rate at position 1) or fusion (rate at position 2)
            if process[1] not in compartments:
                linear_rates.extend(fission_processes_to_rates([process]))
            elif process[2] not in compartments:
                quadratic_rates.extend(fusion_processes_to_rates([process]))
            else:
                raise TypeError("Process " + str(tuple(process)) + " is not understood because it's ambiguous in whether it's supposed to be a fission or a fusion process. Please use model.add_fission_processes() or model.add_fusion_processes() to set this process.")
        elif length == 5:
            # transmission process (rate at position 2)
            if not (ignore_rate_position_checks or process[2] not in compartments):
                raise TypeError("Process " + str(tuple(process)) + " is not understood.")
            quadratic_rates.extend(transmission_processes_to_rates([process]))
        else:
            raise TypeError("Process " + str(tuple(process)) + " is not understood.")
    return quadratic_rates, linear_rates
def transition_processes_to_rates(process_list):
    """
    Convert transition processes between compartments, including birth and
    death processes, to linear rate tuples.

    Parameters
    ----------
    process_list : list of tuple
        A list of tuples that contains transition rates in the following format:

        .. code:: python

            [
                ( source_compartment, rate, target_compartment ),
                ...
            ]

        A ``None`` source denotes a birth process, a ``None`` target a
        death process.

    Returns
    -------
    linear_rates : list of tuple
        Rate tuples of the form ``(acting, affected, rate)``.

    Raises
    ------
    ValueError
        If both source and target are ``None``, or if source equals target.

    Example
    -------
    For an SEIR model.

    .. code:: python

        transition_processes_to_rates([
            ("E", symptomatic_rate, "I" ),
            ("I", recovery_rate, "R" ),
        ])
    """
    linear_rates = []
    for source, rate, target in process_list:
        if source is None and target is None:
            # bug fix: the original message was missing the space after
            # "reaction", producing e.g. "The reaction(None, ...)"
            raise ValueError("The reaction " + str((source, rate, target)) +
                             " is meaningless because there are no reactants.")
        elif source == target:
            raise ValueError("Process " +
                             str((source, rate, target)) +
                             " leaves system unchanged")
        elif source is None:
            # birth process: target gains entities at the given rate
            linear_rates.append( (None, target, rate) )
        elif target is None:
            # death process: source loses entities
            linear_rates.append( (source, source, -rate) )
        else:
            # source compartment loses an entity,
            # target compartment gains one
            linear_rates.append((source, source, -rate))
            linear_rates.append((source, target, +rate))
    return linear_rates
def fission_processes_to_rates(process_list):
    """
    Convert linear fission processes between compartments to rate tuples.

    Parameters
    ----------
    process_list : list of tuple
        A list of tuples that contains fission rates in the following format:

        .. code:: python

            [
                (source_compartment, rate, target_compartment_0, target_compartment_1 ),
                ...
            ]

    Returns
    -------
    linear_rates : list of tuple
        Rate tuples of the form ``(acting, affected, rate)``.

    Example
    -------
    For pure exponential growth of compartment `B`.

    .. code:: python

        fission_processes_to_rates([
            ("B", growth_rate, "B", "B" ),
        ])
    """
    linear_rates = []
    for source, rate, target0, target1 in process_list:
        # one entity leaves the source compartment,
        # and each target compartment gains one
        linear_rates.extend([
            (source, source, -rate),
            (source, target0, +rate),
            (source, target1, +rate),
        ])
    return linear_rates
def fusion_processes_to_rates(process_list):
    """
    Convert fusion processes between compartments to quadratic rate tuples.

    Parameters
    ----------
    process_list : list of tuple
        A list of tuples that contains fusion rates in the following format:

        .. code:: python

            [
                (coupling_compartment_0, coupling_compartment_1, rate, target_compartment_0 ),
                ...
            ]

    Returns
    -------
    quad_rates : list of tuple
        Rate tuples of the form ``(coupling0, coupling1, affected, rate)``.

    Example
    -------
    Fusion of reactants "A", and "B" to form "C".

    .. code:: python

        fusion_processes_to_rates([
            ("A", "B", reaction_rate, "C" ),
        ])
    """
    quad_rates = []
    for source0, source1, rate, target in process_list:
        # the target compartment gains one entity,
        # while each source compartment loses one
        quad_rates.extend([
            (source0, source1, target, rate),
            (source0, source1, source0, -rate),
            (source0, source1, source1, -rate),
        ])
    return quad_rates
def transmission_processes_to_rates(process_list):
    r"""
    A wrapper to define quadratic process rates through transmission
    reaction equations.

    Note that in stochastic network/agent simulations, the transmission
    rate is equal to a rate per link. For the mean-field ODEs,
    the rates provided to this function will just be equal
    to the prefactor of the respective quadratic terms.

    For instance, if you analyze an SIR system and simulate on a network of
    mean degree :math:`k_0`, a basic reproduction number :math:`R_0`, and a
    recovery rate :math:`\mu`, you would define the single link transmission
    process as

    .. code:: python

        ("I", "S", R_0/k_0 * mu, "I", "I")

    For the mean-field system here, the corresponding reaction equation
    would read

    .. code:: python

        ("I", "S", R_0 * mu, "I", "I")

    Parameters
    ----------
    process_list : list of tuple
        A list of tuples of the format
        ``(coupling0, coupling1, rate, affected0, affected1)``.

    Returns
    -------
    rate_list : list of tuple
        Rate tuples of the form ``(coupling0, coupling1, affected, rate)``.

    Raises
    ------
    ValueError
        If a process leaves the system unchanged.

    Example
    -------
    For an SEIR model.

    .. code:: python

        transmission_processes_to_rates([
            ("I", "S", +1, "I", "E" ),
        ])
    """
    rate_list = []
    for coupling0, coupling1, rate, affected0, affected1 in process_list:
        reactants = [coupling0, coupling1]
        products = [affected0, affected1]
        shared = set(reactants) & set(products)
        if len(shared) == 2 or tuple(reactants) == tuple(products):
            raise ValueError(
                "Process " +
                str((coupling0, coupling1, rate, affected0, affected1)) +
                " leaves system unchanged")
        if len(shared) == 1:
            # One compartment remains constant through the reaction, so
            # its count never changes.  Only the other reactant changes:
            # its source compartment loses one entity and its target
            # compartment gains one.
            const = shared.pop()
            remaining_reactants = list(reactants)
            remaining_reactants.remove(const)  # first occurrence only
            remaining_products = list(products)
            remaining_products.remove(const)
            src = remaining_reactants[0]
            tgt = remaining_products[0]
            rate_list.append( (const, src, src, -rate) )
            rate_list.append( (const, src, tgt, +rate) )
        else:
            # no reactant remains constant: every source compartment loses
            # one entity and every target compartment gains one
            rate_list.append( (coupling0, coupling1, coupling1, -rate) )
            rate_list.append( (coupling0, coupling1, affected1, +rate) )
            rate_list.append( (coupling0, coupling1, coupling0, -rate) )
            rate_list.append( (coupling0, coupling1, affected0, +rate) )
    return rate_list
def processes_to_events(process_list, compartments, ignore_rate_position_checks=False):
    """
    Convert a list of reaction process tuples to event tuples.

    Parameters
    ----------
    process_list : list of tuple
        A list containing reaction processes in terms of tuples.

        .. code:: python

            [
                # transition process
                ( source_compartment, rate, target_compartment),
                # transmission process
                ( coupling_compartment_0, coupling_compartment_1, rate, target_compartment_0, target_compartment_1),
                # fission process
                ( source_compartment, rate, target_compartment_0, target_compartment_1),
                # fusion process
                ( source_compartment_0, source_compartment_1, rate, target_compartment),
                # death process
                ( source_compartment, rate, None),
                # birth process
                ( None, rate, target_compartment),
            ]

    compartments : list of hashable type
        The compartments of these reaction equations.
    ignore_rate_position_checks : bool, default = False
        Skip the check that the rate sits at the expected position in each
        tuple for transition, birth, death, and transmission processes.
        (Useful if you want to define symbolic transmission processes that
        are compartment-dependent.)

    Returns
    -------
    quadratic_events : list of tuple
        Event tuples for quadratic terms.
    linear_events : list of tuple
        Event tuples for linear terms.

    Raises
    ------
    TypeError
        If a process tuple cannot be classified unambiguously.
    """
    quadratic_events = []
    linear_events = []
    for process in process_list:
        length = len(process)
        if length == 3:
            # transition, birth, or death process (rate at position 1)
            if not (ignore_rate_position_checks or process[1] not in compartments):
                raise TypeError("Process " + str(tuple(process)) + " is not understood.")
            linear_events.extend(transition_processes_to_events([process]))
        elif length == 4:
            # fission (rate at position 1) or fusion (rate at position 2)
            if process[1] not in compartments:
                linear_events.extend(fission_processes_to_events([process]))
            elif process[2] not in compartments:
                quadratic_events.extend(fusion_processes_to_events([process]))
            else:
                raise TypeError("Process " + str(tuple(process)) + " is not understood because it's ambiguous in whether it's supposed to be a fission or a fusion process. Please use model.add_fission_processes() or model.add_fusion_processes() to set this process.")
        elif length == 5:
            # transmission process (rate at position 2)
            if not (ignore_rate_position_checks or process[2] not in compartments):
                raise TypeError("Process " + str(tuple(process)) + " is not understood.")
            quadratic_events.extend(transmission_processes_to_events([process]))
        else:
            raise TypeError("Process " + str(tuple(process)) + " is not understood.")
    return quadratic_events, linear_events
def transition_processes_to_events(process_list):
    """
    Convert transition processes between compartments, including birth and
    death processes, to linear event tuples.

    Parameters
    ----------
    process_list : list of tuple
        A list of tuples that contains transition rates in the following format:

        .. code:: python

            [
                ( source_compartment, rate, target_compartment ),
                ...
            ]

        A ``None`` source denotes a birth process, a ``None`` target a
        death process.

    Returns
    -------
    linear_events : list of tuple
        Event tuples of the form
        ``((acting_compartment,), rate, [(compartment, change), ...])``.

    Raises
    ------
    ValueError
        If both source and target are ``None``, or if source equals target.

    Example
    -------
    For an SEIR model.

    .. code:: python

        transition_processes_to_events([
            ("E", symptomatic_rate, "I" ),
            ("I", recovery_rate, "R" ),
        ])
    """
    linear_events = []
    for source, rate, target in process_list:
        if source is None and target is None:
            # bug fix: the original message was missing the space after
            # "reaction", producing e.g. "The reaction(None, ...)"
            raise ValueError("The reaction " + str((source, rate, target)) +
                             " is meaningless because there are no reactants.")
        elif source == target:
            raise ValueError("Process " +
                             str((source, rate, target)) +
                             " leaves system unchanged")
        elif source is None:
            # birth process: target gains one entity per event
            linear_events.append( ( (None,), rate, [(target, +1)] ))
        elif target is None:
            # death process: source loses one entity per event
            linear_events.append(( (source,), rate, [(source, -1)] ))
        else:
            # source compartment loses an entity,
            # target compartment gains one
            linear_events.append(( (source,), rate, [(source, -1), (target, +1)] ))
    return linear_events
def fission_processes_to_events(process_list):
    """
    Convert linear fission processes between compartments to event tuples.

    Parameters
    ----------
    process_list : list of tuple
        A list of tuples that contains fission rates in the following format:

        .. code:: python

            [
                (source_compartment, rate, target_compartment_0, target_compartment_1 ),
                ...
            ]

    Returns
    -------
    linear_events : list of tuple
        Event tuples of the form
        ``((source,), rate, [(compartment, change), ...])``.

    Example
    -------
    For pure exponential growth of compartment `B`.

    .. code:: python

        fission_processes_to_events([
            ("B", growth_rate, "B", "B" ),
        ])
    """
    linear_events = []
    for source, rate, target0, target1 in process_list:
        # one entity leaves the source compartment,
        # and each target compartment gains one
        changes = [(source, -1), (target0, +1), (target1, +1)]
        linear_events.append(((source,), rate, changes))
    return linear_events
def fusion_processes_to_events(process_list):
    """
    Convert fusion processes between compartments to quadratic event tuples.

    Parameters
    ----------
    process_list : list of tuple
        A list of tuples that contains fusion rates in the following format:

        .. code:: python

            [
                (coupling_compartment_0, coupling_compartment_1, rate, target_compartment_0 ),
                ...
            ]

    Returns
    -------
    quad_events : list of tuple
        Event tuples of the form
        ``((source0, source1), rate, [(compartment, change), ...])``.

    Example
    -------
    Fusion of reactants "A", and "B" to form "C".

    .. code:: python

        fusion_processes_to_events([
            ("A", "B", reaction_rate, "C" ),
        ])
    """
    quad_events = []
    for source0, source1, rate, target in process_list:
        # the target compartment gains one entity,
        # while each source compartment loses one
        changes = [(target, +1), (source0, -1), (source1, -1)]
        quad_events.append(((source0, source1), rate, changes))
    return quad_events
def transmission_processes_to_events(process_list):
    r"""
    A wrapper to define quadratic process events through transmission
    reaction equations.

    Note that in stochastic network/agent simulations, the transmission
    rate is equal to a rate per link. For the mean-field ODEs,
    the rates provided to this function will just be equal
    to the prefactor of the respective quadratic terms.

    For instance, if you analyze an SIR system and simulate on a network of
    mean degree :math:`k_0`, a basic reproduction number :math:`R_0`, and a
    recovery rate :math:`\mu`, you would define the single link transmission
    process as

    .. code:: python

        ("I", "S", R_0/k_0 * mu, "I", "I")

    For the mean-field system here, the corresponding reaction equation
    would read

    .. code:: python

        ("I", "S", R_0 * mu, "I", "I")

    Parameters
    ----------
    process_list : list of tuple
        A list of tuples of the format
        ``(coupling0, coupling1, rate, affected0, affected1)``.

    Returns
    -------
    event_list : list of tuple
        Event tuples of the form
        ``((coupling0, coupling1), rate, [(compartment, change), ...])``.

    Raises
    ------
    ValueError
        If a process leaves the system unchanged.

    Example
    -------
    For an SEIR model.

    .. code:: python

        transmission_processes_to_events([
            ("I", "S", +1, "I", "E" ),
        ])
    """
    event_list = []
    # iterate through processes
    for coupling0, coupling1, rate, affected0, affected1 in process_list:
        reactants = [coupling0, coupling1]
        products = [affected0, affected1]
        shared = set(reactants) & set(products)
        if len(shared) == 2 or tuple(reactants) == tuple(products):
            raise ValueError(
                "Process " +
                str((coupling0, coupling1, rate, affected0, affected1)) +
                " leaves system unchanged")
        if len(shared) == 1:
            # One compartment remains constant through the reaction, so
            # its count never changes.  Only the other reactant changes:
            # its source compartment loses one entity and its target
            # compartment gains one.
            const = shared.pop()
            remaining_reactants = list(reactants)
            remaining_reactants.remove(const)  # first occurrence only
            remaining_products = list(products)
            remaining_products.remove(const)
            src = remaining_reactants[0]
            tgt = remaining_products[0]
            event_list.append(( (const, src), rate, [(src, -1), (tgt, +1)] ))
        else:
            # no reactant remains constant: every source compartment loses
            # one entity and every target compartment gains one
            event_list.append((
                (coupling0, coupling1),
                rate,
                [(coupling1, -1), (affected1, +1), (coupling0, -1), (affected0, +1)],
            ))
    return event_list
| 31.279503 | 267 | 0.58201 | 2,211 | 20,144 | 5.138399 | 0.093623 | 0.029047 | 0.029575 | 0.012675 | 0.946924 | 0.887598 | 0.867265 | 0.854238 | 0.854238 | 0.839011 | 0 | 0.013502 | 0.323471 | 20,144 | 643 | 268 | 31.328149 | 0.82015 | 0.502879 | 0 | 0.706587 | 0 | 0.011976 | 0.092127 | 0.012839 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05988 | false | 0 | 0 | 0 | 0.11976 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
27fc8a0764aa340e2e28613b5a72a2ef2f1f77fe | 214 | py | Python | src/rangeen/_utils.py | khera-shanu/Rangeen | 0a7f7699c0030d28fd42211c1fb33c89ced3e857 | [
"MIT"
] | null | null | null | src/rangeen/_utils.py | khera-shanu/Rangeen | 0a7f7699c0030d28fd42211c1fb33c89ced3e857 | [
"MIT"
] | null | null | null | src/rangeen/_utils.py | khera-shanu/Rangeen | 0a7f7699c0030d28fd42211c1fb33c89ced3e857 | [
"MIT"
] | null | null | null | class Util:
@staticmethod
def _bg(text, bg_color):
return f"\u001b[{bg_color+10}m{text}\u001b[0m"
@staticmethod
def _fg(text, fg_color):
return f"\u001b[{fg_color}m{text}\u001b[0m"
| 23.777778 | 54 | 0.630841 | 33 | 214 | 3.909091 | 0.424242 | 0.232558 | 0.186047 | 0.263566 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.095808 | 0.219626 | 214 | 8 | 55 | 26.75 | 0.676647 | 0 | 0 | 0.285714 | 0 | 0 | 0.32243 | 0.32243 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | false | 0 | 0 | 0.285714 | 0.714286 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
7e01bb9d34d1a5579402176a9b338a11c28609b7 | 19,619 | py | Python | tests/test_strucdata_single.py | alessioprestileo/freesif | 8a7d519b5a8b2cd48c1f6bab2f648a18456beb8a | [
"MIT"
] | 6 | 2020-06-03T09:28:33.000Z | 2021-12-16T17:28:01.000Z | tests/test_strucdata_single.py | alessioprestileo/freesif | 8a7d519b5a8b2cd48c1f6bab2f648a18456beb8a | [
"MIT"
] | 1 | 2020-10-23T11:48:37.000Z | 2020-10-23T11:48:37.000Z | tests/test_strucdata_single.py | alessioprestileo/freesif | 8a7d519b5a8b2cd48c1f6bab2f648a18456beb8a | [
"MIT"
] | 4 | 2021-09-17T07:34:36.000Z | 2022-02-19T18:12:27.000Z | # -*- coding: utf-8 -*-
"""Test StrucData public methods with varying data and arguments for single
superelement results.
"""
import os
import unittest
import numpy as np
import h5py
import freesif as fs
# TODO: use pytables instead of h5py for the verified data to avoid dependency on h5py
FILES = os.path.join(os.path.dirname(__file__), 'files')
class TestStrucDataCase01(unittest.TestCase):
    """1st order 3-node triangular plate elements, element results."""

    @classmethod
    def setUpClass(cls):
        # open the SIF data and the HDF5 file holding the verified reference data
        sif_name = os.path.join(
            FILES, 'struc', 'single_super_elem', 'test01_1stord_linstat_R1.SIU')
        cls._data = fs.open_sif(sif_name)
        cls._f_verified = h5py.File(
            os.path.join(FILES, 'verified_testdata.h5'), 'r')
        cls._gr_verified = cls._f_verified[
            'test01_1stord_linstat_R1/LD_plates/elemres']

    @classmethod
    def tearDownClass(cls):
        cls._data.close()
        cls._f_verified.close()

    def test_get_nodes(self):
        result = self._data.get_nodes(
            sets='LD_plates', kind='shell', disconnected=True)
        self.assertTrue(np.allclose(result, self._gr_verified['nodes']))

    def test_get_elements(self):
        result = self._data.get_elements(
            sets='LD_plates', kind='shell', disconnected=True)
        # get_elements returns (connectivity, offset, eltyp) in that order
        for arr, key in zip(result, ('connectivity', 'offset', 'eltyp')):
            self.assertTrue(np.allclose(arr, self._gr_verified[key]))

    def test_get_elementresults_generalstress(self):
        result = self._data.get_elementresults(
            'generalstress', rescases=1, sets='LD_plates')
        self.assertTrue(np.allclose(result, self._gr_verified['generalstress']))

    def test_calc_principal2d(self):
        stress = self._data.get_elementresults(
            'generalstress', rescases=1, sets='LD_plates')
        principal = fs.calc.principal2d(stress)
        self.assertTrue(
            np.allclose(principal, self._gr_verified['principalstress']))

    def test_calc_vonmises2d(self):
        stress = self._data.get_elementresults(
            'generalstress', rescases=1, sets='LD_plates')
        vonmises = fs.calc.vonmises2d(stress)
        self.assertTrue(
            np.allclose(vonmises, self._gr_verified['vonmisesstress']))

    def test_get_elementresults_decomposed(self):
        result = self._data.get_elementresults(
            'decomposedstress', rescases=1, sets='LD_plates')
        self.assertTrue(
            np.allclose(result, self._gr_verified['decomposedstress']))

    def test_calc_principal_m_2d(self):
        stress = self._data.get_elementresults(
            'decomposedstress', rescases=1, sets='LD_plates')
        principal = fs.calc.principal2d(stress)
        self.assertTrue(
            np.allclose(principal, self._gr_verified['principalstress_m']))

    def test_calc_vonmises_m_2d(self):
        stress = self._data.get_elementresults(
            'decomposedstress', rescases=1, sets='LD_plates')
        vonmises = fs.calc.vonmises2d(stress)
        self.assertTrue(
            np.allclose(vonmises, self._gr_verified['vonmisesstress_m']))
class TestStrucDataCase02(unittest.TestCase):
    """1st order 3-node triangular plate elements, node results."""

    @classmethod
    def setUpClass(cls):
        # open the SIF data and the HDF5 file holding the verified reference data
        sif_name = os.path.join(
            FILES, 'struc', 'single_super_elem', 'test01_1stord_linstat_R1.SIU')
        cls._data = fs.open_sif(sif_name)
        cls._f_verified = h5py.File(
            os.path.join(FILES, 'verified_testdata.h5'), 'r')
        cls._gr_verified = cls._f_verified[
            'test01_1stord_linstat_R1/LD_plates/noderes']

    @classmethod
    def tearDownClass(cls):
        cls._data.close()
        cls._f_verified.close()

    def test_get_nodes(self):
        result = self._data.get_nodes(
            sets='LD_plates', kind='shell', disconnected=False)
        self.assertTrue(np.allclose(result, self._gr_verified['nodes']))

    def test_get_elements(self):
        result = self._data.get_elements(
            sets='LD_plates', kind='shell', disconnected=False)
        # get_elements returns (connectivity, offset, eltyp) in that order
        for arr, key in zip(result, ('connectivity', 'offset', 'eltyp')):
            self.assertTrue(np.allclose(arr, self._gr_verified[key]))

    def test_get_noderesults_displacement(self):
        result = self._data.get_noderesults(
            'displacement', rescases=1, sets='LD_plates', disconnected=False)
        self.assertTrue(np.allclose(result, self._gr_verified['displacement']))
class TestStrucDataCase03(unittest.TestCase):
    """1st order 2-node beam elements, element results."""

    @classmethod
    def setUpClass(cls):
        # open the SIF data and the HDF5 file holding the verified reference data
        sif_name = os.path.join(
            FILES, 'struc', 'single_super_elem', 'test01_1stord_linstat_R1.SIU')
        cls._data = fs.open_sif(sif_name)
        cls._f_verified = h5py.File(
            os.path.join(FILES, 'verified_testdata.h5'), 'r')
        cls._gr_verified = cls._f_verified[
            'test01_1stord_linstat_R1/LD_beams/elemres']
        cls._sets = ['LD_chords', 'LD_beams']

    @classmethod
    def tearDownClass(cls):
        cls._data.close()
        cls._f_verified.close()

    def test_get_nodes(self):
        result = self._data.get_nodes(
            sets=self._sets, kind='beam', disconnected=True)
        self.assertTrue(np.allclose(result, self._gr_verified['nodes']))

    def test_get_elements(self):
        result = self._data.get_elements(
            sets=self._sets, kind='beam', disconnected=True)
        # get_elements returns (connectivity, offset, eltyp) in that order
        for arr, key in zip(result, ('connectivity', 'offset', 'eltyp')):
            self.assertTrue(np.allclose(arr, self._gr_verified[key]))

    def test_get_elementresults_beamforce(self):
        result = self._data.get_elementresults(
            'beamforce', rescases=1, sets=self._sets)
        self.assertTrue(np.allclose(result, self._gr_verified['beamforce']))
class TestStrucDataCase04(unittest.TestCase):
    """1st order 4-node quad plate elements, element results."""

    @classmethod
    def setUpClass(cls):
        # open the SIF data and the HDF5 file holding the verified reference data
        sif_name = os.path.join(
            FILES, 'struc', 'single_super_elem', 'test01_1stord_linstat_R1.SIU')
        cls._data = fs.open_sif(sif_name)
        cls._f_verified = h5py.File(
            os.path.join(FILES, 'verified_testdata.h5'), 'r')
        cls._gr_verified = cls._f_verified[
            'test01_1stord_linstat_R1/MD_plates/elemres']

    @classmethod
    def tearDownClass(cls):
        cls._data.close()
        cls._f_verified.close()

    def test_get_nodes(self):
        result = self._data.get_nodes(
            sets='MD_plates', kind='shell', disconnected=True)
        self.assertTrue(np.allclose(result, self._gr_verified['nodes']))

    def test_get_elements(self):
        result = self._data.get_elements(
            sets='MD_plates', kind='shell', disconnected=True)
        # get_elements returns (connectivity, offset, eltyp) in that order
        for arr, key in zip(result, ('connectivity', 'offset', 'eltyp')):
            self.assertTrue(np.allclose(arr, self._gr_verified[key]))

    def test_get_elementresults_generalstress(self):
        result = self._data.get_elementresults(
            'generalstress', rescases=1, sets='MD_plates')
        self.assertTrue(np.allclose(result, self._gr_verified['generalstress']))

    def test_calc_principal2d(self):
        stress = self._data.get_elementresults(
            'generalstress', rescases=1, sets='MD_plates')
        principal = fs.calc.principal2d(stress)
        self.assertTrue(
            np.allclose(principal, self._gr_verified['principalstress']))

    def test_calc_vonmises2d(self):
        stress = self._data.get_elementresults(
            'generalstress', rescases=1, sets='MD_plates')
        vonmises = fs.calc.vonmises2d(stress)
        self.assertTrue(
            np.allclose(vonmises, self._gr_verified['vonmisesstress']))

    def test_get_elementresults_decomposed(self):
        result = self._data.get_elementresults(
            'decomposedstress', rescases=1, sets='MD_plates')
        self.assertTrue(
            np.allclose(result, self._gr_verified['decomposedstress']))

    def test_calc_principal_m_2d(self):
        stress = self._data.get_elementresults(
            'decomposedstress', rescases=1, sets='MD_plates')
        principal = fs.calc.principal2d(stress)
        self.assertTrue(
            np.allclose(principal, self._gr_verified['principalstress_m']))

    def test_calc_vonmises_m_2d(self):
        stress = self._data.get_elementresults(
            'decomposedstress', rescases=1, sets='MD_plates')
        vonmises = fs.calc.vonmises2d(stress)
        self.assertTrue(
            np.allclose(vonmises, self._gr_verified['vonmisesstress_m']))
class TestStrucDataCase05(unittest.TestCase):
    """1st order 4-node quad plate elements, node results."""

    @classmethod
    def setUpClass(cls):
        # open the SIF data and the HDF5 file holding the verified reference data
        sif_name = os.path.join(
            FILES, 'struc', 'single_super_elem', 'test01_1stord_linstat_R1.SIU')
        cls._data = fs.open_sif(sif_name)
        cls._f_verified = h5py.File(
            os.path.join(FILES, 'verified_testdata.h5'), 'r')
        cls._gr_verified = cls._f_verified[
            'test01_1stord_linstat_R1/MD_plates/noderes']

    @classmethod
    def tearDownClass(cls):
        cls._data.close()
        cls._f_verified.close()

    def test_get_nodes(self):
        result = self._data.get_nodes(
            sets='MD_plates', kind='shell', disconnected=False)
        self.assertTrue(np.allclose(result, self._gr_verified['nodes']))

    def test_get_elements(self):
        result = self._data.get_elements(
            sets='MD_plates', kind='shell', disconnected=False)
        # get_elements returns (connectivity, offset, eltyp) in that order
        for arr, key in zip(result, ('connectivity', 'offset', 'eltyp')):
            self.assertTrue(np.allclose(arr, self._gr_verified[key]))

    def test_get_noderesults_displacement(self):
        result = self._data.get_noderesults(
            'displacement', rescases=1, sets='MD_plates', disconnected=False)
        self.assertTrue(np.allclose(result, self._gr_verified['displacement']))
class TestStrucDataCase06(unittest.TestCase):
    """2nd order 6-node triangular plate elements, element results."""

    @classmethod
    def setUpClass(cls):
        # open the SIF data and the HDF5 file holding the verified reference data
        sif_name = os.path.join(
            FILES, 'struc', 'single_super_elem', 'test01_2ndord_linstat_R1.SIU')
        cls._data = fs.open_sif(sif_name)
        cls._f_verified = h5py.File(
            os.path.join(FILES, 'verified_testdata.h5'), 'r')
        cls._gr_verified = cls._f_verified[
            'test01_2ndord_linstat_R1/LD_plates/elemres']

    @classmethod
    def tearDownClass(cls):
        cls._data.close()
        cls._f_verified.close()

    def test_get_nodes(self):
        result = self._data.get_nodes(
            sets='LD_plates', kind='shell', disconnected=True)
        self.assertTrue(np.allclose(result, self._gr_verified['nodes']))

    def test_get_elements(self):
        result = self._data.get_elements(
            sets='LD_plates', kind='shell', disconnected=True)
        # get_elements returns (connectivity, offset, eltyp) in that order
        for arr, key in zip(result, ('connectivity', 'offset', 'eltyp')):
            self.assertTrue(np.allclose(arr, self._gr_verified[key]))

    def test_get_elementresults_generalstress(self):
        result = self._data.get_elementresults(
            'generalstress', rescases=1, sets='LD_plates')
        self.assertTrue(np.allclose(result, self._gr_verified['generalstress']))

    def test_calc_principal_thickshell(self):
        stress = self._data.get_elementresults(
            'generalstress', rescases=1, sets='LD_plates')
        principal = fs.calc.principal_thickshell(stress)
        self.assertTrue(
            np.allclose(principal, self._gr_verified['principalstress']))
class TestStrucDataCase07(unittest.TestCase):
    """2nd order 6-node triangular plate elements, node results."""

    @classmethod
    def setUpClass(cls):
        # open the SIF data and the HDF5 file holding the verified reference data
        sif_name = os.path.join(
            FILES, 'struc', 'single_super_elem', 'test01_2ndord_linstat_R1.SIU')
        cls._data = fs.open_sif(sif_name)
        cls._f_verified = h5py.File(
            os.path.join(FILES, 'verified_testdata.h5'), 'r')
        cls._gr_verified = cls._f_verified[
            'test01_2ndord_linstat_R1/LD_plates/noderes']

    @classmethod
    def tearDownClass(cls):
        cls._data.close()
        cls._f_verified.close()

    def test_get_nodes(self):
        result = self._data.get_nodes(
            sets='LD_plates', kind='shell', disconnected=False)
        self.assertTrue(np.allclose(result, self._gr_verified['nodes']))

    def test_get_elements(self):
        result = self._data.get_elements(
            sets='LD_plates', kind='shell', disconnected=False)
        # get_elements returns (connectivity, offset, eltyp) in that order
        for arr, key in zip(result, ('connectivity', 'offset', 'eltyp')):
            self.assertTrue(np.allclose(arr, self._gr_verified[key]))

    def test_get_noderesults_displacement(self):
        result = self._data.get_noderesults(
            'displacement', rescases=1, sets='LD_plates', disconnected=False)
        self.assertTrue(np.allclose(result, self._gr_verified['displacement']))
class TestStrucDataCase08(unittest.TestCase):
    """Element results for 2nd order 3-node beam elements."""

    @classmethod
    def setUpClass(cls):
        # open the structural data file and the beam element-result
        # verification group once for all tests in this class
        sif = os.path.join(FILES, 'struc', 'single_super_elem', 'test01_2ndord_linstat_R1.SIU')
        cls._data = fs.open_sif(sif)
        cls._f_verified = h5py.File(os.path.join(FILES, 'verified_testdata.h5'), 'r')
        cls._gr_verified = cls._f_verified['test01_2ndord_linstat_R1/LD_beams/elemres']
        # the tests query both chord and beam sets together
        cls._sets = ['LD_chords', 'LD_beams']

    @classmethod
    def tearDownClass(cls):
        # release the handles opened in setUpClass
        cls._data.close()
        cls._f_verified.close()

    def test_get_nodes(self):
        """Disconnected beam node coordinates must match the verified data."""
        actual = self._data.get_nodes(sets=self._sets, kind='beam', disconnected=True)
        self.assertTrue(np.allclose(actual, self._gr_verified['nodes']))

    def test_get_elements(self):
        """Connectivity, offsets and element types must match the verified data."""
        elems = self._data.get_elements(sets=self._sets, kind='beam', disconnected=True)
        expected = self._gr_verified
        self.assertTrue(np.allclose(elems[0], expected['connectivity']))
        self.assertTrue(np.allclose(elems[1], expected['offset']))
        self.assertTrue(np.allclose(elems[2], expected['eltyp']))

    def test_get_elementresults_beamforce(self):
        """Beam forces for rescase 1 must match the verified data."""
        actual = self._data.get_elementresults('beamforce', rescases=1, sets=self._sets)
        self.assertTrue(np.allclose(actual, self._gr_verified['beamforce']))
class TestStrucDataCase09(unittest.TestCase):
    """Element results for 2nd order 8-node quadrilateral shell elements."""

    @classmethod
    def setUpClass(cls):
        # open the structural data file and the matching element-result
        # verification group once for all tests in this class
        sif = os.path.join(FILES, 'struc', 'single_super_elem', 'test01_2ndord_linstat_R1.SIU')
        cls._data = fs.open_sif(sif)
        cls._f_verified = h5py.File(os.path.join(FILES, 'verified_testdata.h5'), 'r')
        cls._gr_verified = cls._f_verified['test01_2ndord_linstat_R1/MD_plates/elemres']

    @classmethod
    def tearDownClass(cls):
        # release the handles opened in setUpClass
        cls._data.close()
        cls._f_verified.close()

    def test_get_nodes(self):
        """Disconnected shell node coordinates must match the verified data."""
        actual = self._data.get_nodes(sets='MD_plates', kind='shell', disconnected=True)
        self.assertTrue(np.allclose(actual, self._gr_verified['nodes']))

    def test_get_elements(self):
        """Connectivity, offsets and element types must match the verified data."""
        elems = self._data.get_elements(sets='MD_plates', kind='shell', disconnected=True)
        expected = self._gr_verified
        self.assertTrue(np.allclose(elems[0], expected['connectivity']))
        self.assertTrue(np.allclose(elems[1], expected['offset']))
        self.assertTrue(np.allclose(elems[2], expected['eltyp']))

    def test_get_elementresults_generalstress(self):
        """General stress element results for rescase 1 must match the verified data."""
        actual = self._data.get_elementresults('generalstress', rescases=1, sets='MD_plates')
        self.assertTrue(np.allclose(actual, self._gr_verified['generalstress']))

    def test_calc_principal_thickshell(self):
        """Principal stresses derived from general stress must match the verified data."""
        general = self._data.get_elementresults('generalstress', rescases=1, sets='MD_plates')
        principal = fs.calc.principal_thickshell(general)
        self.assertTrue(np.allclose(principal, self._gr_verified['principalstress']))
class TestStrucDataCase10(unittest.TestCase):
    """Node results for 2nd order 8-node quadrilateral shell elements."""

    @classmethod
    def setUpClass(cls):
        # open the structural data file and the matching node-result
        # verification group once for all tests in this class
        sif = os.path.join(FILES, 'struc', 'single_super_elem', 'test01_2ndord_linstat_R1.SIU')
        cls._data = fs.open_sif(sif)
        cls._f_verified = h5py.File(os.path.join(FILES, 'verified_testdata.h5'), 'r')
        cls._gr_verified = cls._f_verified['test01_2ndord_linstat_R1/MD_plates/noderes']

    @classmethod
    def tearDownClass(cls):
        # release the handles opened in setUpClass
        cls._data.close()
        cls._f_verified.close()

    def test_get_nodes(self):
        """Connected (shared) node coordinates must match the verified data."""
        actual = self._data.get_nodes(sets='MD_plates', kind='shell', disconnected=False)
        self.assertTrue(np.allclose(actual, self._gr_verified['nodes']))

    def test_get_elements(self):
        """Connectivity, offsets and element types must match the verified data."""
        elems = self._data.get_elements(sets='MD_plates', kind='shell', disconnected=False)
        expected = self._gr_verified
        self.assertTrue(np.allclose(elems[0], expected['connectivity']))
        self.assertTrue(np.allclose(elems[1], expected['offset']))
        self.assertTrue(np.allclose(elems[2], expected['eltyp']))

    def test_get_noderesults_displacement(self):
        """Displacements for rescase 1 must match the verified data."""
        actual = self._data.get_noderesults('displacement', rescases=1,
                                            sets='MD_plates', disconnected=False)
        self.assertTrue(np.allclose(actual, self._gr_verified['displacement']))
# run the full test suite when this module is executed as a script
if __name__ == '__main__':
    unittest.main()
| 43.213656 | 114 | 0.690402 | 2,413 | 19,619 | 5.342312 | 0.059677 | 0.055853 | 0.067334 | 0.115429 | 0.959041 | 0.959041 | 0.959041 | 0.959041 | 0.959041 | 0.938872 | 0 | 0.013299 | 0.187471 | 19,619 | 453 | 115 | 43.309051 | 0.79537 | 0.073398 | 0 | 0.909639 | 0 | 0 | 0.136436 | 0.03854 | 0 | 0 | 0 | 0.002208 | 0.186747 | 1 | 0.186747 | false | 0 | 0.01506 | 0 | 0.231928 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fd66a8a5379defbf4d6764b79f383fe0f417621c | 11,838 | py | Python | pyastar.py | srama2512/a-star | 7b1e9ea2413b30feca8d457fefbff2ef94da37cb | [
"MIT"
] | null | null | null | pyastar.py | srama2512/a-star | 7b1e9ea2413b30feca8d457fefbff2ef94da37cb | [
"MIT"
] | null | null | null | pyastar.py | srama2512/a-star | 7b1e9ea2413b30feca8d457fefbff2ef94da37cb | [
"MIT"
] | null | null | null | import cv2
import ctypes
import numpy as np
import inspect
from os.path import abspath, dirname, join
fname = abspath(inspect.getfile(inspect.currentframe()))
lib = ctypes.cdll.LoadLibrary(join(dirname(fname), 'astar.so'))
astar = lib.astar
ndmat_f_type = np.ctypeslib.ndpointer(
dtype=np.float32, ndim=1, flags='C_CONTIGUOUS')
ndmat_i_type = np.ctypeslib.ndpointer(
dtype=np.int32, ndim=1, flags='C_CONTIGUOUS')
astar.restype = ctypes.c_bool
astar.argtypes = [ndmat_f_type, ctypes.c_int, ctypes.c_int,
ctypes.c_int, ctypes.c_int, ctypes.c_bool,
ndmat_i_type]
weighted_astar = lib.weighted_astar
weighted_astar.restype = ctypes.c_bool
weighted_astar.argtypes = [ndmat_f_type, ctypes.c_int, ctypes.c_int,
ctypes.c_int, ctypes.c_int, ctypes.c_bool,
ctypes.c_float, ctypes.c_int, ndmat_i_type]
multi_goal_astar = lib.multi_goal_astar
multi_goal_astar.restype = ctypes.c_int
multi_goal_astar.argtypes = [ndmat_f_type, ndmat_f_type, ctypes.c_int,
ctypes.c_int, ctypes.c_int, ndmat_i_type,
ctypes.c_int, ctypes.c_bool, ndmat_i_type]
multi_goal_weighted_astar = lib.multi_goal_weighted_astar
multi_goal_weighted_astar.restype = ctypes.c_int
multi_goal_weighted_astar.argtypes = [ndmat_f_type, ndmat_f_type, ctypes.c_int,
ctypes.c_int, ctypes.c_int, ndmat_i_type,
ctypes.c_int, ctypes.c_bool, ctypes.c_float,
ctypes.c_int, ndmat_i_type]
def astar_path(obmap, start, goal, allow_diagonal=False):
    """Run grid A* from ``start`` to ``goal`` on the cost map ``obmap``.

    Both endpoints are (row, col) pairs.  Returns an array of (row, col)
    coordinates ordered from start to goal, or an empty array when no
    path exists (or start == goal).
    """
    height, width = obmap.shape
    # Reject endpoints that fall outside the grid.
    if not (0 <= start[0] < height and 0 <= start[1] < width):
        raise ValueError('Start of (%d, %d) lies outside grid.' % (start))
    if not (0 <= goal[0] < height and 0 <= goal[1] < width):
        raise ValueError('Goal of (%d, %d) lies outside grid.' % (goal))

    start_idx = np.ravel_multi_index(start, (height, width))
    goal_idx = np.ravel_multi_index(goal, (height, width))

    # The C++ code records, for every visited cell, the flat index of its
    # predecessor on the best known route.
    paths = np.full(height * width, -1, dtype=np.int32)
    found = astar(
        obmap.flatten(), height, width, start_idx, goal_idx, allow_diagonal,
        paths  # output parameter
    )
    if not found:
        return np.array([])

    # Walk the predecessor chain back from the goal to the start.
    trail = []
    idx = goal_idx
    while idx != start_idx:
        trail.append(np.unravel_index(idx, (height, width)))
        idx = paths[idx]
    if not trail:
        return np.array([])
    trail.append(np.unravel_index(start_idx, (height, width)))
    return np.vstack(trail[::-1])
def astar_planner(obmap, start, goal, allow_diagonal=False):
    """Plan with A* using (x, y) endpoints.

    start - (x, y) coordinates
    goal - (x, y) coordinates

    Returns:
        path_x, path_y - lists of x and y coordinates ordered from the
        GOAL back to the START, or (None, None) when no path exists.
    """
    # astar_path works in (row, col) == (y, x) order
    path = astar_path(obmap, (start[1], start[0]), (goal[1], goal[0]),
                      allow_diagonal=allow_diagonal)
    if path.shape[0] == 0:
        return None, None
    # reverse so the lists run from goal back to start
    return path[:, 1].tolist()[::-1], path[:, 0].tolist()[::-1]
def weighted_astar_path(obmap, start, goal, allow_diagonal=False,
                        wscale=4.0, niters=1):
    """Run iterative weighted A* from ``start`` to ``goal`` on ``obmap``.

    Endpoints are (row, col) pairs.  ``wscale`` scales the heuristic and
    ``niters`` is the number of search iterations (must be >= 1).
    Returns an array of (row, col) coordinates from start to goal, or an
    empty array when no path exists.
    """
    height, width = obmap.shape
    # Reject endpoints that fall outside the grid.
    if not (0 <= start[0] < height and 0 <= start[1] < width):
        raise ValueError('Start of (%d, %d) lies outside grid.' % (start))
    if not (0 <= goal[0] < height and 0 <= goal[1] < width):
        raise ValueError('Goal of (%d, %d) lies outside grid.' % (goal))
    # at least one search iteration is required
    assert(niters >= 1)

    start_idx = np.ravel_multi_index(start, (height, width))
    goal_idx = np.ravel_multi_index(goal, (height, width))

    # The C++ code records, for every visited cell, the flat index of its
    # predecessor on the best known route.
    paths = np.full(height * width, -1, dtype=np.int32)
    found = weighted_astar(
        obmap.flatten(), height, width, start_idx, goal_idx,
        allow_diagonal, wscale, niters,
        paths  # output parameter
    )
    if not found:
        return np.array([])

    # Walk the predecessor chain back from the goal to the start.
    trail = []
    idx = goal_idx
    while idx != start_idx:
        trail.append(np.unravel_index(idx, (height, width)))
        idx = paths[idx]
    if not trail:
        return np.array([])
    trail.append(np.unravel_index(start_idx, (height, width)))
    return np.vstack(trail[::-1])
def weighted_astar_planner(obmap, start, goal, allow_diagonal=False,
                           wscale=4.0, niters=1):
    """Plan with weighted A* using (x, y) endpoints.

    start - (x, y) coordinates
    goal - (x, y) coordinates

    Returns:
        path_x, path_y - lists of x and y coordinates ordered from the
        GOAL back to the START, or (None, None) when no path exists.
    """
    # weighted_astar_path works in (row, col) == (y, x) order
    path = weighted_astar_path(obmap, (start[1], start[0]),
                               (goal[1], goal[0]),
                               allow_diagonal=allow_diagonal,
                               wscale=wscale, niters=niters)
    if path.shape[0] == 0:
        return None, None
    # reverse so the lists run from goal back to start
    return path[:, 1].tolist()[::-1], path[:, 0].tolist()[::-1]
def multi_goal_astar_path(obmap, gmap, start, goals, allow_diagonal=False):
    """Run A* from ``start`` to the nearest of several goal cells.

    ``start`` is a (row, col) pair; ``goals`` is a pair of parallel
    sequences (rows, cols).  Returns an array of (row, col) coordinates
    from start to the reached goal, or an empty array on failure.
    """
    height, width = obmap.shape
    # Reject a start that falls outside the grid.
    if not (0 <= start[0] < height and 0 <= start[1] < width):
        raise ValueError('Start of (%d, %d) lies outside grid.' % (start))

    start_idx = np.ravel_multi_index(start, (height, width))
    # flatten every (y, x) goal into a linear grid index
    goal_idxs = np.array(
        [np.ravel_multi_index((int(gy), int(gx)), (height, width))
         for gy, gx in zip(*goals)],
        dtype=np.int32)

    # The C++ code records, for every visited cell, the flat index of its
    # predecessor, and returns the flat index of the goal it reached.
    paths = np.full(height * width, -1, dtype=np.int32)
    reached = multi_goal_astar(
        obmap.flatten(), gmap.flatten(), height, width, start_idx,
        goal_idxs, len(goal_idxs), allow_diagonal,
        paths  # output parameter
    )
    if reached == -1:
        return np.array([])

    # Walk the predecessor chain back from the reached goal to the start.
    trail = []
    idx = reached
    while idx != start_idx:
        trail.append(np.unravel_index(idx, (height, width)))
        idx = paths[idx]
    if not trail:
        return np.array([])
    trail.append(np.unravel_index(start_idx, (height, width)))
    return np.vstack(trail[::-1])
def multi_goal_astar_planner(
    obmap, start, gmap, allow_diagonal=False, use_contours=False
):
    """
    start - (x, y) coordinates
    gmap - binary goal map; non-zero cells mark goal regions

    Returns:
        path_x, path_y - a list of x, y coordinates
                         starting from GOAL to the START
        (None, None) when no path is found.
    """
    # astar_path requires (y, x) as input
    path_start = (start[1], start[0])
    if not use_contours:
        # take the centroid of every connected goal region as a goal point
        n_labels, labels, stats, centroids = cv2.connectedComponentsWithStats(
            (gmap * 255).astype(np.uint8), 8
        )
        assert n_labels > 1, "MultiGoalAstar: goal map is empty!"
        # label 0 is background; centroids are (x, y), goals want (y, x)
        path_goals = (centroids[1:, 1], centroids[1:, 0])
    else:
        # take every contour pixel of the goal regions as a goal point
        # NOTE(review): cv2.findContours' return layout differs between
        # OpenCV versions; the indexing below assumes a specific tuple
        # shape - confirm against the cv2 version actually deployed.
        contours = cv2.findContours(
            (gmap * 255).astype(np.uint8), cv2.RETR_LIST, cv2.CHAIN_APPROX_NONE
        )
        path_goals_y, path_goals_x = [], []
        for contour in contours:
            contour = contour[0][:, 0]
            if len(contour.shape) == 1:
                continue
            path_goals_y.append(contour[:, 1])
            path_goals_x.append(contour[:, 0])
        path_goals = (np.concatenate(path_goals_y, axis=0),
                      np.concatenate(path_goals_x, axis=0))
    path = multi_goal_astar_path(obmap, gmap, path_start, path_goals, allow_diagonal=allow_diagonal)
    if path.shape[0] > 0:
        # reverse so the lists run from the reached goal back to the start
        path_y = path[:, 0].tolist()[::-1]
        path_x = path[:, 1].tolist()[::-1]
    else:
        path_y = None
        path_x = None
    return path_x, path_y
def multi_goal_weighted_astar_path(obmap, gmap, start, goals,
                                   allow_diagonal=False, wscale=4.0, niters=1):
    """Run weighted A* from ``start`` to the nearest of several goal cells.

    ``start`` is a (row, col) pair; ``goals`` is a pair of parallel
    sequences (rows, cols).  ``wscale`` scales the heuristic and
    ``niters`` sets the iteration count.  Returns an array of (row, col)
    coordinates from start to the reached goal, or an empty array.
    """
    height, width = obmap.shape
    # Reject a start that falls outside the grid.
    if not (0 <= start[0] < height and 0 <= start[1] < width):
        raise ValueError('Start of (%d, %d) lies outside grid.' % (start))

    start_idx = np.ravel_multi_index(start, (height, width))
    # flatten every (y, x) goal into a linear grid index
    goal_idxs = np.array(
        [np.ravel_multi_index((int(gy), int(gx)), (height, width))
         for gy, gx in zip(*goals)],
        dtype=np.int32)

    # The C++ code records, for every visited cell, the flat index of its
    # predecessor, and returns the flat index of the goal it reached.
    paths = np.full(height * width, -1, dtype=np.int32)
    reached = multi_goal_weighted_astar(
        obmap.flatten(), gmap.flatten(), height, width, start_idx,
        goal_idxs, len(goal_idxs), allow_diagonal, wscale, niters,
        paths  # output parameter
    )
    if reached == -1:
        return np.array([])

    # Walk the predecessor chain back from the reached goal to the start.
    trail = []
    idx = reached
    while idx != start_idx:
        trail.append(np.unravel_index(idx, (height, width)))
        idx = paths[idx]
    if not trail:
        return np.array([])
    trail.append(np.unravel_index(start_idx, (height, width)))
    return np.vstack(trail[::-1])
def multi_goal_weighted_astar_planner(obmap, start, gmap, allow_diagonal=False,
                                      use_contours=False, wscale=4.0, niters=1):
    """
    start - (x, y) coordinates
    gmap - binary goal map; non-zero cells mark goal regions

    Returns:
        path_x, path_y - a list of x, y coordinates
                         starting from GOAL to the START
        (None, None) when no path is found.
    """
    # astar_path requires (y, x) as input
    path_start = (start[1], start[0])
    if not use_contours:
        # take the centroid of every connected goal region as a goal point
        n_labels, labels, stats, centroids = cv2.connectedComponentsWithStats(
            (gmap * 255).astype(np.uint8), 8
        )
        assert n_labels > 1, "MultiGoalAstar: goal map is empty!"
        # label 0 is background; centroids are (x, y), goals want (y, x)
        path_goals = (centroids[1:, 1], centroids[1:, 0])
    else:
        # take every contour pixel of the goal regions as a goal point
        # NOTE(review): cv2.findContours' return layout differs between
        # OpenCV versions; the indexing below assumes a specific tuple
        # shape - confirm against the cv2 version actually deployed.
        contours = cv2.findContours(
            (gmap * 255).astype(np.uint8), cv2.RETR_LIST, cv2.CHAIN_APPROX_NONE
        )
        path_goals_y, path_goals_x = [], []
        for contour in contours:
            contour = contour[0][:, 0]
            if len(contour.shape) == 1:
                continue
            path_goals_y.append(contour[:, 1])
            path_goals_x.append(contour[:, 0])
        path_goals = (np.concatenate(path_goals_y, axis=0),
                      np.concatenate(path_goals_x, axis=0))
    path = multi_goal_weighted_astar_path(
        obmap, gmap, path_start, path_goals, allow_diagonal=allow_diagonal,
        wscale=wscale, niters=niters,
    )
    if path.shape[0] > 0:
        # reverse so the lists run from the reached goal back to the start
        path_y = path[:, 0].tolist()[::-1]
        path_x = path[:, 1].tolist()[::-1]
    else:
        path_y = None
        path_x = None
    return path_x, path_y
| 35.023669 | 100 | 0.605508 | 1,629 | 11,838 | 4.195826 | 0.084101 | 0.028676 | 0.029261 | 0.032772 | 0.943233 | 0.923043 | 0.911631 | 0.900219 | 0.884565 | 0.873446 | 0 | 0.020536 | 0.271921 | 11,838 | 337 | 101 | 35.127596 | 0.772479 | 0.108802 | 0 | 0.721992 | 0 | 0 | 0.030172 | 0 | 0 | 0 | 0 | 0 | 0.012448 | 1 | 0.033195 | false | 0 | 0.020747 | 0 | 0.120332 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fd8a386aae3c5b2119d8429e4e265e7d97d78465 | 120 | py | Python | api/src/util/date.py | AlbertSuarez/weCooltra | 0674ed40ad7e19e9706431333434bc162b2cd89b | [
"MIT"
] | 13 | 2019-05-04T01:58:29.000Z | 2021-04-04T14:07:04.000Z | api/src/util/date.py | AlbertSuarez/weCooltra | 0674ed40ad7e19e9706431333434bc162b2cd89b | [
"MIT"
] | null | null | null | api/src/util/date.py | AlbertSuarez/weCooltra | 0674ed40ad7e19e9706431333434bc162b2cd89b | [
"MIT"
] | null | null | null | import datetime
def to_string(str_to_parse):
    """Parse a 'YYYY-MM-DD HH:MM:SS' string into a datetime object.

    NOTE(review): despite the name, this converts *from* a string and
    returns a ``datetime.datetime`` instance.
    """
    date_format = "%Y-%m-%d %H:%M:%S"
    return datetime.datetime.strptime(str_to_parse, date_format)
| 20 | 72 | 0.716667 | 21 | 120 | 3.857143 | 0.666667 | 0.123457 | 0.246914 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.116667 | 120 | 5 | 73 | 24 | 0.764151 | 0 | 0 | 0 | 0 | 0 | 0.141667 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
fdb69192673c5720326bdf61c78c6c9bc4301529 | 7,538 | py | Python | TopQuarkAnalysis/TopPairBSM/python/RecoInput_ttbar_cfi.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 6 | 2017-09-08T14:12:56.000Z | 2022-03-09T23:57:01.000Z | TopQuarkAnalysis/TopPairBSM/python/RecoInput_ttbar_cfi.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 545 | 2017-09-19T17:10:19.000Z | 2022-03-07T16:55:27.000Z | TopQuarkAnalysis/TopPairBSM/python/RecoInput_ttbar_cfi.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 14 | 2017-10-04T09:47:21.000Z | 2019-10-23T18:04:45.000Z | # from /RelValQCD_Pt_3000_3500/CMSSW_2_1_0_pre6-RelVal-1213987236-IDEAL_V2-2nd/GEN-SIM-DIGI-RAW-HLTDEBUG-RECO
import FWCore.ParameterSet.Config as cms
# from
def RecoInput() :
    """Return a PoolSource over the RelVal TTbar GEN-SIM-DIGI-RAW-HLTDEBUG-RECO sample."""
    # Single PoolSource with verbose debugging enabled; the fileNames list is
    # the fixed set of RelValTTbar RECO files for the IDEAL_V2 conditions.
    source = cms.Source("PoolSource",
        debugVerbosity = cms.untracked.uint32(200),
        debugFlag = cms.untracked.bool(True),
        fileNames = cms.untracked.vstring(
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0003/C4807780-C140-DD11-A03E-000423D94700.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/1E214499-FF40-DD11-8FB1-000423D6B48C.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/2C0910EE-0141-DD11-B889-000423D9A212.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/404AF710-1B41-DD11-BCAA-000423D9A2AE.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/44362DF5-0141-DD11-BE89-000423D94E70.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/4893E9E6-0141-DD11-BF9B-000423D174FE.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/4C747DCB-C240-DD11-9FFD-001617C3B654.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/5A8D6CDA-0141-DD11-8385-001617DBCF90.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/6CB037F2-1641-DD11-97F0-000423D60FF6.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/76868FAB-FD40-DD11-B004-000423D985E4.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/98503807-FE40-DD11-BAFD-000423D6B5C4.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/A62D04A2-2E41-DD11-A83A-001617C3B6E8.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/A66AEAF6-E840-DD11-BDAE-000423D6B48C.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/AE397234-CA40-DD11-9413-001617E30F48.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/BED974D0-FD40-DD11-9EE8-000423D992A4.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/DC2591DC-0141-DD11-9BC7-001617C3B710.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/E22815E1-0141-DD11-9A34-000423D94A20.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0004/E2D61307-0241-DD11-AD42-000423D99160.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/027AF342-2D41-DD11-B7C9-000423D6B42C.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/226FAD25-1C41-DD11-8F69-001617DBD556.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/288EEA34-0541-DD11-99F3-001617C3B706.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/2A206EAB-0D41-DD11-93F9-001617C3B77C.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/34AF420E-1A41-DD11-B3B3-000423D6B358.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/3AF6EB27-0A41-DD11-AD0F-001D09F2546F.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/3C5DCCD4-0441-DD11-9DAD-000423D9890C.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/3C7BD8C3-0941-DD11-8F0B-001D09F2525D.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/4448EBFB-1B41-DD11-BDBA-001617C3B6C6.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/4E4F6886-1C41-DD11-B609-001617C3B65A.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/60F01F12-0341-DD11-B2AF-000423D999CA.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/641734FF-0341-DD11-8CEF-001D09F29533.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/64C86292-1841-DD11-8B73-000423D985E4.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/7255AFEF-1941-DD11-B344-000423DD2F34.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/780CDBA3-1841-DD11-ADAA-000423D9870C.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/78FE44A1-0541-DD11-8F39-0016177CA7A0.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/7C0DF817-1E41-DD11-9E09-000423D9880C.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/7C70FC0D-1B41-DD11-935A-000423D9890C.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/7C9FC0F9-2C41-DD11-BD8F-000423D6B5C4.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/80FA9837-1741-DD11-AEF1-001617C3B778.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/8454634B-0341-DD11-BDFB-0019B9F72F97.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/869BFC18-0341-DD11-B513-001617DBD472.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/923E868A-0341-DD11-9140-0030487A18A4.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/92B94E04-0A41-DD11-BB83-001D09F23A84.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/9662B512-1F41-DD11-B784-000423D6B444.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/9C28266C-0B41-DD11-80B1-001D09F2523A.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/9CB53EA6-0841-DD11-B49B-00304879FBB2.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/9EE1C0A7-0641-DD11-9311-000423D98BE8.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/AA70BA5B-1341-DD11-B23B-000423D94534.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/B256FEFB-0941-DD11-BA0E-000423D98930.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/C09D9CC9-0741-DD11-AD14-001D09F232B9.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/C6ED94E3-0841-DD11-8BE5-001D09F2438A.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/C8374E0F-0541-DD11-BFD2-000423D98920.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/D656C8DD-1B41-DD11-AF3A-001617E30F56.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/DCF600E4-1841-DD11-B757-000423D98DB4.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/DE2546D0-0B41-DD11-9999-001D09F248F8.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/DED45D59-0741-DD11-8EB9-001D09F23A34.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/ECF91043-0A41-DD11-9B57-001D09F29538.root',
            '/store/relval/2008/6/22/RelVal-RelValTTbar-1213987236-IDEAL_V2-2nd/0005/F25BD264-1E41-DD11-8381-000423D992DC.root'
        )
    )
    return source
| 100.506667 | 127 | 0.762271 | 1,076 | 7,538 | 5.27974 | 0.232342 | 0.153142 | 0.173561 | 0.204189 | 0.647597 | 0.647597 | 0.647597 | 0.647597 | 0.647597 | 0.647597 | 0 | 0.374856 | 0.080923 | 7,538 | 74 | 128 | 101.864865 | 0.44515 | 0.014858 | 0 | 0 | 0 | 0.863636 | 0.869173 | 0.867825 | 0 | 0 | 0 | 0 | 0 | 1 | 0.015152 | false | 0 | 0.015152 | 0 | 0.045455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
8b6effbddcb4b28f7ec00a48ea5bba55cd92ade9 | 19,542 | py | Python | datcore-sdk/python/datcore_sdk/api/api_token_api.py | mguidon/aiohttp-dsm | 612e4c7f6f73df7d6752269965c428fda0276191 | [
"MIT"
] | null | null | null | datcore-sdk/python/datcore_sdk/api/api_token_api.py | mguidon/aiohttp-dsm | 612e4c7f6f73df7d6752269965c428fda0276191 | [
"MIT"
] | null | null | null | datcore-sdk/python/datcore_sdk/api/api_token_api.py | mguidon/aiohttp-dsm | 612e4c7f6f73df7d6752269965c428fda0276191 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Blackfynn Swagger
Swagger documentation for the Blackfynn api # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from datcore_sdk.api_client import ApiClient
class APITokenApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_api_token(self, create_token_request, **kwargs):  # noqa: E501
    """creates an API Token for the requesting User  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_api_token(create_token_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CreateTokenRequest create_token_request: name of the API Token (required)
    :return: APITokenSecretDTO
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the synchronous and async_req paths return the call result
    # directly (the underlying call returns a thread when async_req=True).
    return self.create_api_token_with_http_info(create_token_request, **kwargs)  # noqa: E501
def create_api_token_with_http_info(self, create_token_request, **kwargs):  # noqa: E501
    """creates an API Token for the requesting User  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_api_token_with_http_info(create_token_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CreateTokenRequest create_token_request: name of the API Token (required)
    :return: APITokenSecretDTO
             If the method is called asynchronously,
             returns the request thread.
    """

    local_var_params = locals()

    # every keyword argument this endpoint understands
    all_params = ['create_token_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # reject unknown keyword arguments, merge known ones into local_var_params
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_api_token" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'create_token_request' is set
    if ('create_token_request' not in local_var_params or
            local_var_params['create_token_request'] is None):
        raise ValueError("Missing the required parameter `create_token_request` when calling `create_api_token`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # the request body is the CreateTokenRequest payload itself
    body_params = None
    if 'create_token_request' in local_var_params:
        body_params = local_var_params['create_token_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # delegate the actual HTTP call to the shared ApiClient
    return self.api_client.call_api(
        '/token/', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='APITokenSecretDTO',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_api_token(self, uuid, **kwargs):  # noqa: E501
    """deletes API Token if the requesting User has access to it  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_api_token(uuid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str uuid: API Token UUID (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both the synchronous and async_req paths return the call result
    # directly (the underlying call returns a thread when async_req=True).
    return self.delete_api_token_with_http_info(uuid, **kwargs)  # noqa: E501
def delete_api_token_with_http_info(self, uuid, **kwargs):  # noqa: E501
    """deletes API Token if the requesting User has access to it  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_api_token_with_http_info(uuid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str uuid: API Token UUID (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """

    local_var_params = locals()

    # every keyword argument this endpoint understands
    all_params = ['uuid']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # reject unknown keyword arguments, merge known ones into local_var_params
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_api_token" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'uuid' is set
    if ('uuid' not in local_var_params or
            local_var_params['uuid'] is None):
        raise ValueError("Missing the required parameter `uuid` when calling `delete_api_token`")  # noqa: E501

    collection_formats = {}

    # the token UUID is substituted into the URL path
    path_params = {}
    if 'uuid' in local_var_params:
        path_params['uuid'] = local_var_params['uuid']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # delegate the actual HTTP call to the shared ApiClient
    return self.api_client.call_api(
        '/token/{uuid}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_api_tokens(self, org_id, **kwargs):  # noqa: E501
    """Fetch all API Tokens visible to the requesting User in *org_id*.

    The user must be a member of the organization; the current org is
    switched to the one given.  Synchronous by default; pass
    ``async_req=True`` for an asynchronous request that returns the
    request thread.

    :param async_req bool
    :param str org_id: the id of the organization for which tokens are retrieved (required)
    :return: list[APITokenDTO]
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant does the real work; only the response
    # data is of interest here.  Async callers receive the thread object
    # from the same call, so a single return covers both cases.
    kwargs['_return_http_data_only'] = True
    return self.get_api_tokens_with_http_info(org_id, **kwargs)  # noqa: E501
def get_api_tokens_with_http_info(self, org_id, **kwargs):  # noqa: E501
    """gets all the API Tokens the requesting User has access to in the org requested, if user is a member. will switch current org to the one given.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_api_tokens_with_http_info(org_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org_id: the id of the organization for which tokens are retrieved (required)
    :return: list[APITokenDTO]
             If the method is called asynchronously,
             returns the request thread.
    """
    local_var_params = locals()

    all_params = ['org_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_api_tokens" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'org_id' is set
    if ('org_id' not in local_var_params or
            local_var_params['org_id'] is None):
        raise ValueError("Missing the required parameter `org_id` when calling `get_api_tokens`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'org_id' in local_var_params:
        # BUG FIX: the key must match the '{organization}' placeholder in
        # the resource path below; the previous key 'orgId' was never
        # substituted, so the literal '{organization}' was sent in the URL.
        path_params['organization'] = local_var_params['org_id']  # noqa: E501

    query_params = []

    header_params = {}
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/token/organization/{organization}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[APITokenDTO]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_api_tokens_0(self, **kwargs):  # noqa: E501
    """Fetch all API Tokens the requesting User has access to.

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    :param async_req bool
    :return: list[APITokenDTO]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant; both the sync and async
    # paths return its result unchanged, so one return suffices.
    kwargs['_return_http_data_only'] = True
    return self.get_api_tokens_0_with_http_info(**kwargs)  # noqa: E501
def get_api_tokens_0_with_http_info(self, **kwargs):  # noqa: E501
    """Fetch all API Tokens the requesting User has access to (full-HTTP-info variant).

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    :param async_req bool
    :return: list[APITokenDTO]
             If the method is called asynchronously,
             returns the request thread.
    """
    local_var_params = locals()

    # This endpoint takes no API parameters, only the client options.
    all_params = ['async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_api_tokens_0" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['*/*']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/token/', 'GET',
        {},   # path params
        [],   # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[APITokenDTO]',  # noqa: E501
        auth_settings=['api_key'],  # noqa: E501
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={})
def update_api_token(self, uuid, update_token_request, **kwargs):  # noqa: E501
    """Update an API Token if the requesting User has access to it.

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    :param async_req bool
    :param str uuid: API Token UUID (required)
    :param UpdateTokenRequest update_token_request: API Token Updates (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant performs the request; sync and async
    # callers both get its return value back unchanged.
    kwargs['_return_http_data_only'] = True
    return self.update_api_token_with_http_info(uuid, update_token_request, **kwargs)  # noqa: E501
def update_api_token_with_http_info(self, uuid, update_token_request, **kwargs):  # noqa: E501
    """Update an API Token (full-HTTP-info variant).

    Updates the API Token identified by *uuid* if the requesting User has
    access to it.  Synchronous by default; pass ``async_req=True`` for an
    asynchronous request that returns the request thread.

    :param async_req bool
    :param str uuid: API Token UUID (required)
    :param UpdateTokenRequest update_token_request: API Token Updates (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    local_var_params = locals()

    all_params = ['uuid', 'update_token_request', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_api_token" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Both the path parameter and the request body are mandatory.
    if local_var_params.get('uuid') is None:
        raise ValueError("Missing the required parameter `uuid` when calling `update_api_token`")  # noqa: E501
    if local_var_params.get('update_token_request') is None:
        raise ValueError("Missing the required parameter `update_token_request` when calling `update_api_token`")  # noqa: E501

    path_params = {'uuid': local_var_params['uuid']}
    body_params = local_var_params['update_token_request']

    return self.api_client.call_api(
        '/token/{uuid}', 'PUT',
        path_params,
        [],   # query params
        {},   # header params
        body=body_params,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['api_key'],  # noqa: E501
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={})
| 39.399194 | 167 | 0.623733 | 2,370 | 19,542 | 4.839241 | 0.073418 | 0.048827 | 0.073241 | 0.031389 | 0.939925 | 0.935217 | 0.914552 | 0.901212 | 0.890923 | 0.881071 | 0 | 0.014612 | 0.292601 | 19,542 | 495 | 168 | 39.478788 | 0.815032 | 0.32663 | 0 | 0.74717 | 1 | 0 | 0.167255 | 0.033467 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041509 | false | 0 | 0.015094 | 0 | 0.116981 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
4799e265eb56743dec224c85da5e509aada272e6 | 5,547 | py | Python | main.py | gabrieldevuk/Epic-Games-Nitro-Auto | 6e8b722ba823da7e931be4a452b6e19353066b03 | [
"MIT"
] | 2 | 2021-08-03T14:11:32.000Z | 2021-08-18T13:24:30.000Z | main.py | ukphotography/Epic-Games-Nitro-Auto | 6e8b722ba823da7e931be4a452b6e19353066b03 | [
"MIT"
] | 5 | 2021-06-15T20:16:18.000Z | 2021-06-20T12:27:14.000Z | main.py | ukphotography/Epic-Games-Nitro-Auto | 6e8b722ba823da7e931be4a452b6e19353066b03 | [
"MIT"
] | null | null | null | from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.support.ui import WebDriverWait
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
from time import sleep
import random
with open("emails.txt") as f:
for line in f:
email = line.strip("\n")
options = webdriver.ChromeOptions()
options.add_experimental_option("excludeSwitches", ["enable-logging"])
driver = webdriver.Chrome(options=options, executable_path=r"chromedriver.exe")
driver.get("https://www.epicgames.com/store/en-US/p/discord--discord-nitro")
sleep(4)
driver.find_element_by_xpath('//*[@id="user"]/ul/li/a/span').click()
sleep(3)
driver.find_element_by_xpath('//*[@id="to-register"]/span').click()
sleep(1)
driver.find_element_by_xpath('//*[@id="login-with-epic"]').click()
name = random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz")
lastname = random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz")
displayname = random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz")
password = "1" + "!" + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz") + random.choice("abcdefghijklmnopqrstuvwxyz")
sleep(3)
driver.find_element_by_xpath('/html/body/div[2]/div[2]/div/div/div/form/div[2]/div[1]/div/input').click()
driver.find_element_by_xpath('/html/body/div[2]/div[2]/div/div/div/form/div[2]/div[1]/div/input').send_keys(name)
driver.find_element_by_xpath('/html/body/div[2]/div[2]/div/div/div/form/div[2]/div[2]/div/input').click()
driver.find_element_by_xpath('/html/body/div[2]/div[2]/div/div/div/form/div[2]/div[2]/div/input').send_keys(lastname)
driver.find_element_by_xpath('/html/body/div[2]/div[2]/div/div/div/form/div[3]/div/input').click()
driver.find_element_by_xpath('/html/body/div[2]/div[2]/div/div/div/form/div[3]/div/input').send_keys(displayname)
driver.find_element_by_xpath('/html/body/div[2]/div[2]/div/div/div/form/div[4]/div/input').click()
driver.find_element_by_xpath('/html/body/div[2]/div[2]/div/div/div/form/div[4]/div/input').send_keys(email)
driver.find_element_by_xpath('/html/body/div[2]/div[2]/div/div/div/form/div[5]/div/input').click()
driver.find_element_by_xpath('/html/body/div[2]/div[2]/div/div/div/form/div[5]/div/input').send_keys(password)
driver.find_element_by_xpath('/html/body/div[2]/div[2]/div/div/div/form/label[2]/span[1]/span/input').click()
driver.find_element_by_xpath('/html/body/div[2]/div[2]/div/div/div/form/div[7]').click()
input(f"Prompted Captcha to solve, please solve this manually! Press enter when done!")
print(f"Account created with email: {email} and password: {password}")
input("Awaiting verification code, check email and enter code manually.")
driver.find_element_by_xpath('/html/body/div[1]/div/div[4]/main/div/div[3]/div/div/div[2]/div[2]/div/aside/div/div/div[4]/div/button/span').click()
sleep(1)
driver.find_element_by_xpath('//*[@id="agree"]').click()
driver.find_element_by_xpath('/html/body/div[6]/div/div/div/div[2]/div/div[2]/button').click()
sleep(5)
driver.find_element_by_xpath('/html/body/div[1]/div/div[4]/div/div[4]/div[1]/div[2]/div[4]/div/div/label/div[1]/span').click
driver.find_element_by_xpath('/html/body/div[1]/div/div[4]/div/div[4]/div[1]/div[2]/div[5]/div/div/button').click()
driver.find_element_by_xpath('/html/body/div[1]/div/div[4]/div/div[4]/div[1]/div[2]/div[6]/div[2]/div/div[2]/button[2]').click()
| 104.660377 | 811 | 0.729764 | 715 | 5,547 | 5.559441 | 0.163636 | 0.064906 | 0.38239 | 0.398491 | 0.732327 | 0.700881 | 0.685283 | 0.676226 | 0.676226 | 0.608805 | 0 | 0.015902 | 0.104381 | 5,547 | 52 | 812 | 106.673077 | 0.784219 | 0 | 0 | 0.076923 | 0 | 0.346154 | 0.472066 | 0.410555 | 0.019231 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.057692 | 0.211538 | 0 | 0.211538 | 0.019231 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
47b0f164f62d4fb75a802730ba4913bba8cb32f5 | 35,354 | py | Python | sumo/tests/tools/sumolib/sumolib3d/unittests_shapes_coords_3d.py | iltempe/osmosi | c0f54ecdbb7c7b5602d587768617d0dc50f1d75d | [
"MIT"
] | null | null | null | sumo/tests/tools/sumolib/sumolib3d/unittests_shapes_coords_3d.py | iltempe/osmosi | c0f54ecdbb7c7b5602d587768617d0dc50f1d75d | [
"MIT"
] | null | null | null | sumo/tests/tools/sumolib/sumolib3d/unittests_shapes_coords_3d.py | iltempe/osmosi | c0f54ecdbb7c7b5602d587768617d0dc50f1d75d | [
"MIT"
] | 2 | 2017-12-14T16:41:59.000Z | 2020-10-16T17:51:27.000Z | #!/usr/bin/env python
"""
@file unittest_sumolib_3d.py
@author Marek Heinrich
@version $Id$
This script tests sumolib 3D functions
SUMO, Simulation of Urban MObility; see http://sumo.dlr.de/
Copyright (C) 2016-2017 DLR (http://www.dlr.de/) and contributors
This file is part of SUMO.
SUMO is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
"""
from __future__ import absolute_import
from __future__ import print_function
import sys
import os
import subprocess
import unittest
# Do not use SUMO_HOME here to ensure you are always testing the
# functions from the same tree the test is in
sys.path.append(
os.path.join(os.path.dirname(__file__), '..', '..', '..', '..', 'tools'))
import sumolib # noqa
NODEFILE_2D = 'input_nodes_2d.nod.xml'
NODEFILE_3D = 'input_nodes.nod.xml'
EDGEFILE = 'input_edges.edg.xml'
NETFILE_2D = 'input_net_2d.net.xml'
NETFILE_3D = 'input_net_3d.net.xml'
class Test_Shapes(unittest.TestCase):
""" Tests to check inport of sumo elements with/without z coords. """
@classmethod
def setUpClass(cls):
    """Generate the SUMO network file(s) once for all tests.

    Runs netconvert on the node/edge input files and loads the resulting
    3D network (including internal edges) into ``cls.sumo_net``.
    """
    netcon_bin = sumolib.checkBinary('netconvert')
    for node_file, net_file in [
            # (NODEFILE_2D, NETFILE_2D),
            (NODEFILE_3D, NETFILE_3D)
            ]:
        command = [netcon_bin,
                   "-n", node_file,
                   "-e", EDGEFILE,
                   "-o", net_file,
                   # keep raw coordinates so tests can assert on them
                   "--offset.disable-normalization"]
        # The return code is deliberately not bound/checked (the previous
        # 'netconvertProcess' variable was never used); a failed conversion
        # surfaces immediately below when readNet() cannot parse the file.
        subprocess.call(command,
                        stdout=sys.stdout,
                        stderr=sys.stderr)

    cls.sumo_net = sumolib.net.readNet(
        NETFILE_3D,
        withInternal=True)
@classmethod
def tearDownClass(cls):
    """Remove the generated net file(s) once all tests have run."""
    for net_file in (NETFILE_2D, NETFILE_3D):
        if os.path.exists(net_file):
            os.remove(net_file)
#### check node's coords ############################################
@unittest.skipIf(False, '')
def test_check_node_coords_2d_for_3d_input_node_no_z(self):
    """2d coords of a node declared without a z value -- plain (x, y)."""
    coord = self.sumo_net.getNode('a0').getCoord()
    self.assertEqual(coord, (100.0, 0.0))
@unittest.skipIf(False, '')
def test_check_node_coords_3d_for_3d_input_node_z_no_z(self):
    """3d coords of a node declared without a z value -- z comes back as 0."""
    coord = self.sumo_net.getNode('a0').getCoord3D()
    self.assertEqual(coord, (100.0, 0.0, 0.0))
@unittest.skipIf(False, '')
def test_check_node_coords_2d_for_3d_input_node_z_not_zero(self):
    """ test to retrieve the coords from a node with z!=0
    - should be a 2d coords tuple (the z value is dropped)"""
    # getCoord() returns only (x, y); the node's z=10 is not part of it.
    self.assertEqual(
        self.sumo_net.getNode('a1').getCoord(),
        (200.0, 0.0))
@unittest.skipIf(False, '')
def test_check_node_coords_3d_for_3d_input_node_z_not_zero(self):
    """3d coords of a node with z != 0 -- full (x, y, z) tuple."""
    coord = self.sumo_net.getNode('a1').getCoord3D()
    self.assertEqual(coord, (200.0, 0.0, 10.0))
#### check node's shape #############################################
@unittest.skipIf(False, '')
def test_check_node_shape_2d_on_a_node_with_no_z(self):
    """2d shape of a node without z -- every point is an (x, y) pair."""
    shape = self.sumo_net.getNode('a0').getShape()
    self.assertTrue(len(shape) > 0)
    for point in shape:
        self.assertTrue(len(point) == 2)
@unittest.skipIf(False, '')
def test_check_node_shape_3d_on_a_node_with_no_z(self):
    """3d shape of a node without z -- every point is an (x, y, z) triple."""
    shape = self.sumo_net.getNode('a0').getShape3D()
    self.assertTrue(len(shape) > 0)
    for point in shape:
        self.assertTrue(len(point) == 3)
@unittest.skipIf(False, '')
def test_check_node_shape_2d_on_a_node_with_z(self):
    """ test to retrieve the shape from a node with z value
    - should be a 2d coords tuple"""
    # BUG FIX: this 2d test previously called getShape3D() and asserted
    # 3-tuples (a copy-paste of the 3d test below); it now exercises the
    # 2d accessor as the test name and docstring intend.
    result = self.sumo_net.getNode('a1').getShape()
    self.assertTrue(len(result) > 0)
    for shape_point in result:
        self.assertTrue(len(shape_point) == 2)
@unittest.skipIf(False, '')
def test_check_node_shape_3d_on_a_node_with_z(self):
    """3d shape of a node with z -- every point is an (x, y, z) triple."""
    shape = self.sumo_net.getNode('a1').getShape3D()
    self.assertTrue(len(shape) > 0)
    for point in shape:
        self.assertTrue(len(point) == 3)
#### check edge's shape #############################################
@unittest.skipIf(False, '')
def test_h001_edge_shape_not_stored(self):
    """
    The edge is the straight center line of an H (both directions,
    one lane per edge); its shape is _not_ stored in sumo's net xml,
    so the raw shape is simply the from-/to-node coordinates.
    Junction shapes are engaged so the lane of the edge
    is somewhat shorter at the start and at the end.
    """
    the_edge = self.sumo_net.getEdge('center_we')
    the_lane = the_edge.getLane(0)  # 'center_we_0' (lane lookup must succeed)

    #### check edge shape ################################
    expected_shape_2d = [(1000, 100), (1200, 100)]
    expected_shape_3d = [(1000, 100, 10), (1200, 100, 10)]

    self.assertEqual(the_edge.getRawShape(), expected_shape_2d)
    self.assertEqual(the_edge.getRawShape3D(), expected_shape_3d)
@unittest.skipIf(False, '')
def test_h002_edge_shape_stored(self):
    """
    The edge is the center line of an H (both directions,
    one lane per edge) with one extra shape point.
    Junction shapes are engaged so the lane of the edge
    is somewhat shorter at the start and at the end.
    Shape of the edge _is_ stored in sumo's net xml

    NOTE: this method was previously also named
    test_h001_edge_shape_not_stored, which shadowed (and therefore
    silently disabled) the preceding test of the same name; it has been
    renamed so both tests actually run.
    """
    edge_id = 'center_ew'
    the_edge = self.sumo_net.getEdge(edge_id)

    #### check edge shape ################################
    expected_result_raw_edge_shape_2D = \
        [(1200, 100), (1100, 125), (1000, 100)]
    expected_result_raw_edge_shape_3D = \
        [(1200, 100, 10), (1100, 125, 10), (1000, 100, 10)]

    result_raw_edge_shape_2D = the_edge.getRawShape()
    result_raw_edge_shape_3D = the_edge.getRawShape3D()

    self.assertEqual(result_raw_edge_shape_2D,
                     expected_result_raw_edge_shape_2D)
    self.assertEqual(result_raw_edge_shape_3D,
                     expected_result_raw_edge_shape_3D)
#### check lane's shape #############################################
@unittest.skipIf(False, '')
def test_h001_lane_shape_2d(self):
    """
    The edge is the center line of an H (both directions,
    one lane per edge).
    Junction shapes are engaged so the lane of the edge
    is somewhat shorter at the start and at the end.
    2d version
    """
    the_lane = self.sumo_net.getEdge('center_we').getLane(0)  # 'center_we_0'

    #### lane shape without the junctions included ########
    # only the two (trimmed) end points of the straight center line
    shape_wo = the_lane.getShape(includeJunctions=False)
    self.assertTrue(len(shape_wo) == 2)
    start_wo, end_wo = shape_wo
    self.assertTrue(1000 < start_wo[0] < 1200)  # x
    self.assertTrue(90 < start_wo[1] < 100)     # y
    self.assertTrue(1000 < end_wo[0] < 1200)    # x
    self.assertTrue(90 < end_wo[1] < 100)       # y

    #### lane shape with the junctions included ###########
    # from-/to-node coordinates frame the trimmed lane shape
    shape_wi = the_lane.getShape(includeJunctions=True)
    self.assertTrue(len(shape_wi) == 4)
    from_wi, start_wi, end_wi, to_wi = shape_wi
    self.assertEqual(from_wi, (1000, 100))
    self.assertTrue(1000 < start_wi[0] < 1200)  # x
    self.assertTrue(90 < start_wi[1] < 100)     # y
    self.assertTrue(1000 < end_wi[0] < 1200)    # x
    self.assertTrue(90 < end_wi[1] < 100)       # y
    self.assertEqual(to_wi, (1200, 100))
@unittest.skipIf(False, '')
def test_h001_lane_shape_3d(self):
    """
    The edge is the center line of an H (both directions,
    one lane per edge).
    Junction shapes are engaged so the lane of the edge
    is somewhat shorter at the start and at the end.
    3d version
    """
    the_lane = self.sumo_net.getEdge('center_we').getLane(0)  # 'center_we_0'

    #### lane shape without the junctions included ########
    # only the two (trimmed) end points, all at z == 10
    shape_wo = the_lane.getShape3D(includeJunctions=False)
    self.assertTrue(len(shape_wo) == 2)
    start_wo, end_wo = shape_wo
    self.assertTrue(1000 < start_wo[0] < 1200)  # x
    self.assertTrue(90 < start_wo[1] < 100)     # y
    self.assertTrue(start_wo[2] == 10)          # z
    self.assertTrue(1000 < end_wo[0] < 1200)    # x
    self.assertTrue(90 < end_wo[1] < 100)       # y
    self.assertTrue(end_wo[2] == 10)            # z

    #### lane shape with the junctions included ###########
    # from-/to-node coordinates frame the trimmed lane shape
    shape_wi = the_lane.getShape3D(includeJunctions=True)
    self.assertTrue(len(shape_wi) == 4)
    from_wi, start_wi, end_wi, to_wi = shape_wi
    self.assertEqual(from_wi, (1000, 100, 10))
    self.assertTrue(1000 < start_wi[0] < 1200)  # x
    self.assertTrue(90 < start_wi[1] < 100)     # y
    self.assertTrue(start_wi[2] == 10)          # z
    self.assertTrue(1000 < end_wi[0] < 1200)    # x
    self.assertTrue(90 < end_wi[1] < 100)       # y
    self.assertTrue(end_wi[2] == 10)            # z
    self.assertEqual(to_wi, (1200, 100, 10))
@unittest.skipIf(False, '')
def test_h003_lane_shape_2d(self):
    """
    The edge is the we-center line of an H (both directions,
    one lane per edge) and carries a user-defined shape point,
    so it is not a straight line.
    Junction shapes are engaged so the lanes of the edge
    are somewhat shorter at the start and at the end.
    With junctions included the shape still starts and ends at the
    from-/to-node coordinates.
    2d version.
    """
    the_lane = self.sumo_net.getEdge('center_ew').getLane(1)  # 'center_ew_1'

    #### lane shape without the junctions included ########
    # trimmed start, the extra shape point, trimmed end
    shape_wo = the_lane.getShape(includeJunctions=False)
    self.assertTrue(len(shape_wo) == 3)
    start_wo, extra_wo, end_wo = shape_wo
    self.assertTrue(1000 < start_wo[0] < 1200)  # x
    self.assertTrue(100 < start_wo[1] < 110)    # y
    self.assertTrue(extra_wo[0] == 1100)        # x (extra point)
    self.assertTrue(125 < extra_wo[1] < 150)    # y (extra point)
    self.assertTrue(1000 < end_wo[0] < 1200)    # x
    self.assertTrue(100 < end_wo[1] < 110)      # y

    #### lane shape with the junctions included ###########
    # additionally framed by the from- and to-node coordinates
    shape_wi = the_lane.getShape(includeJunctions=True)
    self.assertTrue(len(shape_wi) == 5)
    from_wi, start_wi, extra_wi, end_wi, to_wi = shape_wi
    self.assertEqual(from_wi, (1200, 100))
    self.assertTrue(1000 < start_wi[0] < 1200)  # x
    self.assertTrue(100 < start_wi[1] < 110)    # y
    self.assertTrue(extra_wi[0] == 1100)        # x (extra point)
    self.assertTrue(125 < extra_wi[1] < 150)    # y (extra point)
    self.assertTrue(1000 < end_wi[0] < 1200)    # x
    self.assertTrue(100 < end_wi[1] < 110)      # y
    self.assertEqual(to_wi, (1000, 100))
@unittest.skipIf(False, '')
def test_h003_lane_shape_3d(self):
    """
    The edge is the we-center line of an H (both directions,
    one lane per edge) and carries a user-defined shape point,
    so it is not a straight line.
    Junction shapes are engaged so the lanes of the edge
    are somewhat shorter at the start and at the end.
    With junctions included the shape still starts and ends at the
    from-/to-node coordinates.
    3d version.
    """
    the_lane = self.sumo_net.getEdge('center_ew').getLane(1)  # 'center_ew_1'

    #### lane shape without the junctions included ########
    # trimmed start, the extra shape point, trimmed end -- all at z == 10
    shape_wo = the_lane.getShape3D(includeJunctions=False)
    self.assertTrue(len(shape_wo) == 3)
    start_wo, extra_wo, end_wo = shape_wo
    self.assertTrue(1000 < start_wo[0] < 1200)  # x
    self.assertTrue(100 < start_wo[1] < 110)    # y
    self.assertTrue(start_wo[2] == 10)          # z
    self.assertTrue(extra_wo[0] == 1100)        # x (extra point)
    self.assertTrue(125 < extra_wo[1] < 150)    # y (extra point)
    self.assertTrue(extra_wo[2] == 10)          # z (extra point)
    self.assertTrue(1000 < end_wo[0] < 1200)    # x
    self.assertTrue(100 < end_wo[1] < 110)      # y
    self.assertTrue(end_wo[2] == 10)            # z

    #### lane shape with the junctions included ###########
    # additionally framed by the from- and to-node coordinates
    shape_wi = the_lane.getShape3D(includeJunctions=True)
    self.assertTrue(len(shape_wi) == 5)
    from_wi, start_wi, extra_wi, end_wi, to_wi = shape_wi
    self.assertEqual(from_wi, (1200, 100, 10))
    self.assertTrue(1000 < start_wi[0] < 1200)  # x
    self.assertTrue(100 < start_wi[1] < 110)    # y
    self.assertTrue(start_wi[2] == 10)          # z
    self.assertTrue(extra_wi[0] == 1100)        # x (extra point)
    self.assertTrue(125 < extra_wi[1] < 150)    # y (extra point)
    self.assertTrue(extra_wi[2] == 10)          # z (extra point)
    self.assertTrue(1000 < end_wi[0] < 1200)    # x
    self.assertTrue(100 < end_wi[1] < 110)      # y
    self.assertTrue(end_wi[2] == 10)            # z
    self.assertEqual(to_wi, (1000, 100, 10))
@unittest.skipIf(False, '')
def test_h004_lane_shape_2d(self):
    """Get an internal lane and its shape.
    The shape must not be influenced by the includeJunctions parameter.
    Uses the left cross of the H for this test.
    2d version.
    """
    the_lane = self.sumo_net.getEdge(':left_center_3').getLane(0)  # ':left_center_3_0'

    with_junc = the_lane.getShape(includeJunctions=True)
    without_junc = the_lane.getShape(includeJunctions=False)

    # internal lanes ignore the includeJunctions flag entirely
    self.assertEqual(with_junc, without_junc)
    # a valid lane shape needs at least two points
    self.assertTrue(len(without_junc) >= 2)
    # every point lies close to the junction
    for point in without_junc:
        self.assertTrue(995 < point[0] < 1005)
        self.assertTrue(90 < point[1] < 110)
@unittest.skipIf(False, '')
def test_h004_lane_shape_3d(self):
"""Get an internal lane and its shape.
Shape should not be influenced by the incluldeJunc parameter
Use left cross of the H for this test.
3d version.
"""
edge_id = ':left_center_3'
the_edge = self.sumo_net.getEdge(edge_id)
the_lane = the_edge.getLane(0) # ':left_center_3_0'
result_lane_shape_with_junc = \
the_lane.getShape3D(includeJunctions=True)
result_lane_shape_without_junc = \
the_lane.getShape3D(includeJunctions=False)
# there should be no difference between the two results
self.assertEqual(result_lane_shape_with_junc,
result_lane_shape_without_junc)
# there must be at least two shape points
self.assertTrue(len(result_lane_shape_without_junc) >= 2)
# each shape point should be somewhat close the the junction
# and on the same z-level
for shape_point in result_lane_shape_without_junc:
self.assertTrue(995 < shape_point[0] < 1005)
self.assertTrue(90 < shape_point[1] < 110)
self.assertTrue(shape_point[2] == 10)
@unittest.skipIf(False, '')
def test_edge_001_lane_shape_2d(self):
"""
Both way edge is the straight line between two nodes
edge has no extra shape points - no intersections engaged.
Edge is not centered.
2d version.
"""
edge_id = 'straight_with_counter'
the_edge = self.sumo_net.getEdge(edge_id)
the_lane = the_edge.getLane(0) # 'straight_with_counter_0'
#### check lane shape - without junction included ####
result_lane_shape_without_junc = \
the_lane.getShape(includeJunctions=False)
self.assertTrue(len(result_lane_shape_without_junc) == 2)
result_start_point_wo = result_lane_shape_without_junc[0]
result_end_point_wo = result_lane_shape_without_junc[1]
# check first shape point coords
self.assertTrue(100 <= result_start_point_wo[0] <= 200) # x
self.assertTrue(-10 <= result_start_point_wo[1] < 0) # y
# check second shape point coords
self.assertTrue(100 <= result_end_point_wo[0] <= 200) # x
self.assertTrue(-10 <= result_end_point_wo[1] < 0) # y
#### check lane shape - with junction included #######
result_lane_shape_with_junc = \
the_lane.getShape(includeJunctions=True)
self.assertTrue(len(result_lane_shape_with_junc) == 4)
result_from_point_wi = result_lane_shape_with_junc[0]
result_start_point_wi = result_lane_shape_with_junc[1]
result_end_point_wi = result_lane_shape_with_junc[2]
result_to_point_wi = result_lane_shape_with_junc[3]
# check fromNode coords
self.assertEqual(result_from_point_wi,
(100, 0))
# check first shape point coords
self.assertTrue(100 <= result_start_point_wi[0] <= 200) # x
self.assertTrue(-10 <= result_start_point_wi[1] < 0) # y
# check second shape point coords
self.assertTrue(100 <= result_end_point_wi[0] <= 200) # x
self.assertTrue(-10 <= result_end_point_wi[1] < 0) # y
# check toNode coords
self.assertEqual(result_to_point_wi,
(200, 0))
@unittest.skipIf(False, '')
def test_edge_001_lane_shape_3d(self):
"""
Both way edge is the straight line between two nodes
edge has no extra shape points - no intersections engaged.
Edge is not centered.
3d version.
"""
edge_id = 'straight_with_counter'
the_edge = self.sumo_net.getEdge(edge_id)
the_lane = the_edge.getLane(0) # 'straight_with_counter_0'
#### check lane shape - without junction included ####
result_lane_shape_without_junc = \
the_lane.getShape3D(includeJunctions=False)
self.assertTrue(len(result_lane_shape_without_junc) == 2)
result_start_point_wo = result_lane_shape_without_junc[0]
result_end_point_wo = result_lane_shape_without_junc[1]
# check first shape point coords
self.assertTrue(100 <= result_start_point_wo[0] <= 200) # x
self.assertTrue(-10 <= result_start_point_wo[1] < 0) # y
self.assertTrue(result_start_point_wo[2] == 0) # z
# check second shape point coords
self.assertTrue(100 <= result_end_point_wo[0] <= 200) # x
self.assertTrue(-10 <= result_end_point_wo[1] < 0) # y
self.assertTrue(result_end_point_wo[2] == 10) # z
#### check lane shape - with junction included #######
result_lane_shape_with_junc = \
the_lane.getShape3D(includeJunctions=True)
self.assertTrue(len(result_lane_shape_with_junc) == 4)
result_from_point_wi = result_lane_shape_with_junc[0]
result_start_point_wi = result_lane_shape_with_junc[1]
result_end_point_wi = result_lane_shape_with_junc[2]
result_to_point_wi = result_lane_shape_with_junc[3]
# check fromNode coords
self.assertEqual(result_from_point_wi,
(100, 0, 0))
# check first shape point coords
self.assertTrue(100 <= result_start_point_wi[0] <= 200) # x
self.assertTrue(-10 <= result_start_point_wi[1] < 0) # y
self.assertTrue(result_start_point_wi[2] == 0) # z
# check second shape point coords
self.assertTrue(100 <= result_end_point_wi[0] <= 200) # x
self.assertTrue(-10 <= result_end_point_wi[1] < 0) # y
self.assertTrue(result_end_point_wi[2] == 10) # z
# check toNode coords
self.assertEqual(result_to_point_wi,
(200, 0, 10))
@unittest.skipIf(False, '')
def test_sloopy_edge_003_lane_shape_2d(self):
"""
Both way edge which is a sloopy line between two Nodes
since the edge has extra shape points
- no intersections engaged.
There was only one shape point defined in the edge.xml
The coord of the from and to node where not included
(since this is optional
- the counder direction does inclued them - see below)
2d version.
"""
edge_id = 'sloopy_we'
the_edge = self.sumo_net.getEdge(edge_id)
the_lane = the_edge.getLane(0) # 'sloopy_we_0'
#### check lane shape - without junction included ####
result_lane_shape_without_junc = \
the_lane.getShape(includeJunctions=False)
self.assertTrue(len(result_lane_shape_without_junc) == 3)
result_start_point_wo = result_lane_shape_without_junc[0]
result_extra_point_wo = result_lane_shape_without_junc[1]
result_end_point_wo = result_lane_shape_without_junc[2]
# check first shape point coords
self.assertTrue(3000 <= result_start_point_wo[0] <= 3500) # x
self.assertTrue(190 <= result_start_point_wo[1] < 200) # y
# check second shape point coords - extra point
self.assertTrue(result_extra_point_wo[0] == 3250) # x
self.assertTrue(230 <= result_extra_point_wo[1] < 250) # y
# check third shape point coords
self.assertTrue(3000 <= result_end_point_wo[0] <= 3500) # x
self.assertTrue(190 <= result_end_point_wo[1] < 200) # y
#### check lane shape - with junction included #######
result_lane_shape_with_junc = \
the_lane.getShape(includeJunctions=True)
self.assertEqual(len(result_lane_shape_with_junc), 5)
result_from_point_wi = result_lane_shape_with_junc[0]
result_start_point_wi = result_lane_shape_with_junc[1]
result_extra_point_wi = result_lane_shape_with_junc[2]
result_end_point_wi = result_lane_shape_with_junc[3]
result_to_point_wi = result_lane_shape_with_junc[4]
# check fromNode coords
self.assertEqual(result_from_point_wi,
(3000, 200))
# check first shape point coords
self.assertTrue(3000 <= result_start_point_wi[0] <= 3500) # x
self.assertTrue(190 <= result_start_point_wi[1] < 200) # y
# check second shape point coords - extra point
self.assertTrue(result_extra_point_wi[0] == 3250) # x
self.assertTrue(230 <= result_extra_point_wi[1] < 250) # y
# check third shape point coords
self.assertTrue(3000 <= result_end_point_wi[0] <= 3500) # x
self.assertTrue(190 <= result_end_point_wi[1] < 200) # y
# check toNode coords
self.assertEqual(result_to_point_wi,
(3500, 200))
@unittest.skipIf(False, '')
def test_sloopy_edge_003_lane_shape_3d(self):
"""
Both way edge which is a sloopy line between two Nodes
since the edge has extra shape points
- no intersections engaged.
There was only one shape point defined in the edge.xml
The coord of the from and to node where not included
(since this is optional
- the counder direction does inclued them - see below)
3d version.
"""
edge_id = 'sloopy_we'
the_edge = self.sumo_net.getEdge(edge_id)
the_lane = the_edge.getLane(0) # 'sloopy_we_0'
#### check lane shape - without junction included ####
result_lane_shape_without_junc = \
the_lane.getShape3D(includeJunctions=False)
self.assertTrue(len(result_lane_shape_without_junc) == 3)
result_start_point_wo = result_lane_shape_without_junc[0]
result_extra_point_wo = result_lane_shape_without_junc[1]
result_end_point_wo = result_lane_shape_without_junc[2]
# check first shape point coords
self.assertTrue(3000 <= result_start_point_wo[0] <= 3500) # x
self.assertTrue(190 <= result_start_point_wo[1] < 200) # y
self.assertTrue(result_start_point_wo[2] == 10) # z
# check second shape point coords - extra point
self.assertTrue(result_extra_point_wo[0] == 3250) # x
self.assertTrue(230 <= result_extra_point_wo[1] < 250) # y
self.assertTrue(result_extra_point_wo[2] == 10) # z
# check third shape point coords
self.assertTrue(3000 <= result_end_point_wo[0] <= 3500) # x
self.assertTrue(190 <= result_end_point_wo[1] < 200) # y
self.assertTrue(result_end_point_wo[2] == 10) # z
#### check lane shape - with junction included #######
result_lane_shape_with_junc = \
the_lane.getShape3D(includeJunctions=True)
self.assertEqual(len(result_lane_shape_with_junc), 5)
result_from_point_wi = result_lane_shape_with_junc[0]
result_start_point_wi = result_lane_shape_with_junc[1]
result_extra_point_wi = result_lane_shape_with_junc[2]
result_end_point_wi = result_lane_shape_with_junc[3]
result_to_point_wi = result_lane_shape_with_junc[4]
# check fromNode coords
self.assertEqual(result_from_point_wi,
(3000, 200, 10))
# check first shape point coords
self.assertTrue(3000 <= result_start_point_wi[0] <= 3500) # x
self.assertTrue(190 <= result_start_point_wi[1] < 200) # y
self.assertTrue(result_start_point_wi[2] == 10) # z
# check second shape point coords - extra point
self.assertTrue(result_extra_point_wi[0] == 3250) # x
self.assertTrue(230 <= result_extra_point_wi[1] < 250) # y
self.assertTrue(result_extra_point_wi[2] == 10) # z
# check third shape point coords
self.assertTrue(3000 <= result_end_point_wi[0] <= 3500) # x
self.assertTrue(190 <= result_end_point_wi[1] < 200) # y
self.assertTrue(result_end_point_wi[2] == 10) # z
# check toNode coords
self.assertEqual(result_to_point_wi,
(3500, 200, 10))
@unittest.skipIf(False, '')
def test_straight_edge_005_lane_shape_2d(self):
"""
Single way edge with spread type center - no shape points
- no intersections engaged.
Shape of edge and lane are identic.
No junctions are included.
2d version.
"""
edge_id = 'straight_no_counter'
the_edge = self.sumo_net.getEdge(edge_id)
the_lane = the_edge.getLane(0) # straight_no_counter_0
expected_result = \
[(100.0, 100.0), (200.0, 100.0)]
#### check lane shape ################################
result_lane_shape_with_junc = \
the_lane.getShape(includeJunctions=True)
result_lane_shape_without_junc = \
the_lane.getShape(includeJunctions=False)
self.assertEqual(result_lane_shape_with_junc,
expected_result)
self.assertEqual(result_lane_shape_without_junc,
expected_result)
@unittest.skipIf(False, '')
def test_straight_edge_005_lane_shape(self):
"""
Single way edge with spread type center - no shape points
- no intersections engaged.
Shape of edge and lane are identic.
No junctions are included.
3d version.
"""
edge_id = 'straight_no_counter'
the_edge = self.sumo_net.getEdge(edge_id)
the_lane = the_edge.getLane(0) # straight_no_counter_0
expected_result = \
[(100.0, 100.0, 10.0), (200.0, 100.0, 10.0)]
#### check lane shape ################################
result_lane_shape_with_junc = \
the_lane.getShape3D(includeJunctions=True)
result_lane_shape_without_junc = \
the_lane.getShape3D(includeJunctions=False)
self.assertEqual(result_lane_shape_with_junc,
expected_result)
self.assertEqual(result_lane_shape_without_junc,
expected_result)
class Test_ShapesConvertion(unittest.TestCase):
""" """
@unittest.skipIf(False, '')
def test_convertShape_empty_string(self):
"""Inspecting the sumolib's function convertShape()"""
self.assertEqual(sumolib.net.convertShape(''), [])
@unittest.skipIf(False, '')
def test_convertShape_string_2d_one_point(self):
"""Inspecting the sumolib's function convertShape() with one 2d point"""
self.assertEqual(sumolib.net.convertShape('10,11'), [(10, 11, 0)])
@unittest.skipIf(False, '')
def test_convertShape_string_2d_two_point(self):
"""Inspecting the sumolib's function convertShape() with two 2d points"""
self.assertEqual(sumolib.net.convertShape('10,11 12,13 '),
[(10, 11, 0), (12, 13, 0)])
@unittest.skipIf(False, '')
def test_convertShape_string_3d_one_point(self):
"""Inspecting the sumolib's function convertShape() with one 3d point"""
self.assertEqual(sumolib.net.convertShape('10,11,5'), [(10, 11, 5)])
@unittest.skipIf(False, '')
def test_convertShape_string_3d_two_point(self):
"""Inspecting the sumolib's function convertShape() with two 3d points"""
self.assertEqual(sumolib.net.convertShape('10,11,5 12,13,5'),
[(10, 11, 5), (12, 13, 5)])
if __name__ == '__main__':
unittest.main()
| 35.567404 | 81 | 0.631301 | 4,711 | 35,354 | 4.429845 | 0.063893 | 0.059514 | 0.077627 | 0.054626 | 0.91691 | 0.9114 | 0.908573 | 0.902583 | 0.869136 | 0.861517 | 0 | 0.048452 | 0.27086 | 35,354 | 993 | 82 | 35.603223 | 0.761114 | 0.239775 | 0 | 0.76824 | 0 | 0 | 0.015981 | 0.003737 | 0 | 0 | 0 | 0 | 0.356223 | 1 | 0.062232 | false | 0 | 0.015021 | 0 | 0.081545 | 0.002146 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
47b66a19c0c8450c8941ff9bebae7df0f21c6490 | 69 | py | Python | biclustering/__init__.py | yanqinshen/STA-663-final-project | c84b3a7f11509b8d7a0a88c6a4f36ba1aaf165d1 | [
"MIT"
] | null | null | null | biclustering/__init__.py | yanqinshen/STA-663-final-project | c84b3a7f11509b8d7a0a88c6a4f36ba1aaf165d1 | [
"MIT"
] | null | null | null | biclustering/__init__.py | yanqinshen/STA-663-final-project | c84b3a7f11509b8d7a0a88c6a4f36ba1aaf165d1 | [
"MIT"
] | 3 | 2018-04-28T02:53:22.000Z | 2021-04-27T16:21:26.000Z | from functions import SSVD_python
from functions import plotClusters
| 23 | 34 | 0.884058 | 9 | 69 | 6.666667 | 0.666667 | 0.433333 | 0.633333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.115942 | 69 | 2 | 35 | 34.5 | 0.983607 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
47fab99d4be1fe77c4f3bd6a8ea59d8e6b3474f7 | 127 | py | Python | ignite/contrib/engines/__init__.py | VinhLoiIT/ignite | 3b2b9655ea9f80ce49b8a9f1c2d72f80e2a95f56 | [
"BSD-3-Clause"
] | 83 | 2019-04-14T06:58:15.000Z | 2022-03-01T01:34:03.000Z | ignite/contrib/engines/__init__.py | hefv57/ignite | a22a0f5e909ac70d2a1f76a60b6e84b2134f196c | [
"BSD-3-Clause"
] | 17 | 2019-04-28T04:26:24.000Z | 2022-01-19T15:37:42.000Z | ignite/contrib/engines/__init__.py | hefv57/ignite | a22a0f5e909ac70d2a1f76a60b6e84b2134f196c | [
"BSD-3-Clause"
] | 15 | 2019-09-05T04:22:10.000Z | 2022-01-13T15:31:25.000Z | from ignite.contrib.engines.tbptt import create_supervised_tbptt_trainer
from ignite.contrib.engines.tbptt import Tbptt_Events
| 42.333333 | 72 | 0.889764 | 18 | 127 | 6.055556 | 0.555556 | 0.183486 | 0.311927 | 0.440367 | 0.642202 | 0.642202 | 0 | 0 | 0 | 0 | 0 | 0 | 0.062992 | 127 | 2 | 73 | 63.5 | 0.915966 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
9a3ebe9b66e1af68c6d24bea06be05edf89181f5 | 284 | py | Python | intersectingcircles.py | heerdyes/tortoises | 745479e75e2bce92c41b21c52205501ab2fc44ca | [
"Artistic-2.0"
] | null | null | null | intersectingcircles.py | heerdyes/tortoises | 745479e75e2bce92c41b21c52205501ab2fc44ca | [
"Artistic-2.0"
] | null | null | null | intersectingcircles.py | heerdyes/tortoises | 745479e75e2bce92c41b21c52205501ab2fc44ca | [
"Artistic-2.0"
] | null | null | null | import turtle
t=turtle.Turtle()
t.speed(0)
for i in range(360):
t.forward(1)
t.right(1)
t.rt(180)
for i in range(360):
t.forward(1)
t.right(1)
t.rt(90)
for i in range(360):
t.forward(1)
t.right(1)
t.rt(180)
for i in range(360):
t.forward(1)
t.right(1)
| 14.947368 | 20 | 0.59507 | 61 | 284 | 2.770492 | 0.262295 | 0.08284 | 0.142012 | 0.260355 | 0.798817 | 0.798817 | 0.798817 | 0.798817 | 0.798817 | 0.798817 | 0 | 0.131222 | 0.221831 | 284 | 18 | 21 | 15.777778 | 0.633484 | 0 | 0 | 0.777778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.055556 | 0 | 0.055556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d02820b15c86ec52223d2eaa9d7e9dbcddec27d7 | 68,591 | py | Python | benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/SystemIPC/cmp_xalancbmk/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/SystemIPC/cmp_xalancbmk/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/SystemIPC/cmp_xalancbmk/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.000855152,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.547822,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.94863,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.544066,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 2.04052,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.541369,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.95443,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.000161557,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.019859,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.143571,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.146869,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.143732,
'Execution Unit/Register Files/Runtime Dynamic': 0.166728,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.346926,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.961839,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.77715,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00844937,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00844937,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.0073229,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00281485,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00210979,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0263314,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0823157,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.141189,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.280347,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.479542,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.00973,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.24734,
'L2/Runtime Dynamic': 0.0618189,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 5.335,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.99899,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.132576,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.132576,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.96361,
'Load Store Unit/Runtime Dynamic': 2.78539,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.326911,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.653821,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.116022,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.119548,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0465189,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.759073,
'Memory Management Unit/Runtime Dynamic': 0.166066,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 26.4549,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000563493,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0280194,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.275462,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.304045,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 8.1042,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.00072359,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.232544,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.375086,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.189331,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.796961,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.265853,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.34404,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.000136702,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00975396,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0705041,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0721365,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0706408,
'Execution Unit/Register Files/Runtime Dynamic': 0.0818905,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.148532,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.414871,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.90179,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00391645,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00391645,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00342885,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00133701,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00103625,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.012298,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0369207,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0693467,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.41104,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.180399,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.235532,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.84363,
'Instruction Fetch Unit/Runtime Dynamic': 0.534497,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.105542,
'L2/Runtime Dynamic': 0.0299968,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.44705,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.08297,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0714965,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0714965,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.78467,
'Load Store Unit/Runtime Dynamic': 1.50706,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.176298,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.352596,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0625688,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0640049,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.274262,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0300154,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.537853,
'Memory Management Unit/Runtime Dynamic': 0.0940202,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.2052,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000359246,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0104961,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.115803,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.126658,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.19402,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.000602149,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.21709,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.350158,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.176748,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.743996,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.248195,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.30932,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.000113759,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00910573,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0658213,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0673425,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0659351,
'Execution Unit/Register Files/Runtime Dynamic': 0.0764482,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.138667,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.385901,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.81441,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00373152,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00373152,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.0032647,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00127177,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00096738,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0116951,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0352577,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.064738,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.11789,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.159372,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.21988,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.53626,
'Instruction Fetch Unit/Runtime Dynamic': 0.490942,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0836121,
'L2/Runtime Dynamic': 0.0237109,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.2485,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.98335,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0650728,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0650727,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.55579,
'Load Store Unit/Runtime Dynamic': 1.36934,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.160459,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.320917,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0569473,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0580822,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.256035,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0264845,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.50997,
'Memory Management Unit/Runtime Dynamic': 0.0845667,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 18.5844,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000299257,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00979815,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.107901,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.117999,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.90097,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.000629979,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.199544,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.321857,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.162463,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.683864,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.228124,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.27005,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.000119017,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00836977,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0604985,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0618996,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0606175,
'Execution Unit/Register Files/Runtime Dynamic': 0.0702694,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.127453,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.355474,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.71767,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.0033599,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.0033599,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00294335,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00114865,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000889193,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0105523,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0316111,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0595057,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.78507,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.156544,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.202108,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.18728,
'Instruction Fetch Unit/Runtime Dynamic': 0.460321,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0770266,
'L2/Runtime Dynamic': 0.0224337,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.13231,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.9274,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.061314,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.061314,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.42185,
'Load Store Unit/Runtime Dynamic': 1.29109,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.15119,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.30238,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0536577,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0546905,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.235342,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.026031,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.483625,
'Memory Management Unit/Runtime Dynamic': 0.0807215,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 18.0293,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000312971,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00900669,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0993874,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.108707,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.68095,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 3.754608821272818,
'Runtime Dynamic': 3.754608821272818,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.165827,
'Runtime Dynamic': 0.127101,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 82.4396,
'Peak Power': 115.552,
'Runtime Dynamic': 20.0072,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 82.2738,
'Total Cores/Runtime Dynamic': 19.8801,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.165827,
'Total L3s/Runtime Dynamic': 0.127101,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}} | 75.044858 | 124 | 0.681999 | 8,082 | 68,591 | 5.782108 | 0.066691 | 0.123601 | 0.112987 | 0.093471 | 0.940425 | 0.931823 | 0.919176 | 0.892234 | 0.867197 | 0.846676 | 0 | 0.131654 | 0.224388 | 68,591 | 914 | 125 | 75.044858 | 0.746748 | 0 | 0 | 0.650985 | 0 | 0 | 0.657584 | 0.048111 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d042da1f377d9766876cc6af596385901ef65784 | 294 | py | Python | e3nn/o3/__init__.py | SuperXiang/e3nn | b97b2b9e83f896c6b8b21b2fe20dc356a2c18e0c | [
"MIT"
] | 1 | 2021-02-09T10:34:09.000Z | 2021-02-09T10:34:09.000Z | e3nn/o3/__init__.py | SuperXiang/e3nn | b97b2b9e83f896c6b8b21b2fe20dc356a2c18e0c | [
"MIT"
] | null | null | null | e3nn/o3/__init__.py | SuperXiang/e3nn | b97b2b9e83f896c6b8b21b2fe20dc356a2c18e0c | [
"MIT"
] | null | null | null | from .angular_spherical_harmonics import * # noqa
from .cartesian_spherical_harmonics import * # noqa
from .irreps import * # noqa
from .reduce import * # noqa
from .rotation import * # noqa
from .s2grid import * # noqa
from .tensor_product import * # noqa
from .wigner import * # noqa
| 32.666667 | 52 | 0.727891 | 37 | 294 | 5.648649 | 0.378378 | 0.382775 | 0.4689 | 0.267943 | 0.30622 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004202 | 0.190476 | 294 | 8 | 53 | 36.75 | 0.87395 | 0.132653 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
d0dd1570479406ed8117bb20af61a40e3c831271 | 4,785 | py | Python | simexp_gcn/models/gcn.py | SIMEXP/GCN_package | 6a48631a360a851b25dae4fb6961b9bae4673ac1 | [
"MIT"
] | null | null | null | simexp_gcn/models/gcn.py | SIMEXP/GCN_package | 6a48631a360a851b25dae4fb6961b9bae4673ac1 | [
"MIT"
] | 9 | 2022-01-18T20:56:56.000Z | 2022-03-09T21:50:22.000Z | simexp_gcn/models/gcn.py | SIMEXP/GCN_package | 6a48631a360a851b25dae4fb6961b9bae4673ac1 | [
"MIT"
] | 2 | 2021-07-05T20:14:00.000Z | 2021-09-01T18:23:32.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
import torch_geometric as tg
# import torch_geometric_temporal as tgt
import numpy as np
class YuGCN(torch.nn.Module):
    """Six-layer Chebyshev graph-convolution classifier.

    Each graph node carries a time series of length ``n_timepoints``; six
    K=2 ``ChebConv`` layers produce 32 features per node, which are then
    flattened across the nodes and classified by a three-layer MLP.

    Args:
        edge_index: graph connectivity in COO format, shape ``(2, n_edges)``.
        edge_weight: per-edge weights, shape ``(n_edges,)``.
        n_timepoints: input feature length per node (default 50).
        n_classes: number of output classes (default 2).

    NOTE(review): ``edge_index``/``edge_weight`` are stored as plain
    attributes, not registered buffers, so they are excluded from
    ``state_dict`` and do not follow ``.to(device)`` -- callers must place
    them on the correct device themselves.
    """
    def __init__(self, edge_index, edge_weight, n_timepoints=50, n_classes=2):
        super().__init__()
        self.edge_index = edge_index
        self.edge_weight = edge_weight
        self.conv1 = tg.nn.ChebConv(in_channels=n_timepoints, out_channels=32, K=2, bias=True)
        self.conv2 = tg.nn.ChebConv(in_channels=32, out_channels=32, K=2, bias=True)
        self.conv3 = tg.nn.ChebConv(in_channels=32, out_channels=32, K=2, bias=True)
        self.conv4 = tg.nn.ChebConv(in_channels=32, out_channels=32, K=2, bias=True)
        self.conv5 = tg.nn.ChebConv(in_channels=32, out_channels=32, K=2, bias=True)
        self.conv6 = tg.nn.ChebConv(in_channels=32, out_channels=32, K=2, bias=True)
        # The flatten in forward() assumes exactly 512 nodes per sample -- TODO confirm.
        self.fc1 = nn.Linear(512 * 32, 256)
        self.fc2 = nn.Linear(256, 128)
        self.fc3 = nn.Linear(128, n_classes)
        self.dropout = nn.Dropout(0.5)

    def forward(self, x):
        """Apply the six graph convolutions, then the MLP head.

        Args:
            x: node features, presumably ``(n_nodes, n_timepoints)`` with
               ``n_nodes`` a multiple of 512 -- TODO confirm against callers.

        Returns:
            Class logits of shape ``(batch, n_classes)``.
        """
        x = self.conv1(x, self.edge_index, self.edge_weight)
        x = F.relu(x)
        x = self.conv2(x, self.edge_index, self.edge_weight)
        x = F.relu(x)
        x = self.conv3(x, self.edge_index, self.edge_weight)
        x = F.relu(x)
        x = self.conv4(x, self.edge_index, self.edge_weight)
        x = F.relu(x)
        x = self.conv5(x, self.edge_index, self.edge_weight)
        x = F.relu(x)
        x = self.conv6(x, self.edge_index, self.edge_weight)
        # Fix: build the batch vector directly on x's device; the previous
        # numpy round-trip always produced a CPU tensor, which fails when x
        # lives on the GPU.  With batch = arange(n_nodes) every node forms
        # its own "graph", so the mean-pool is effectively an identity;
        # kept for parity with the original implementation.
        x = tg.nn.global_mean_pool(x, torch.arange(x.size(0), device=x.device))
        x = x.view(-1, 512 * 32)
        x = self.fc1(x)
        x = self.dropout(x)
        x = self.fc2(x)
        x = self.dropout(x)
        x = self.fc3(x)
        return x
class LoicGCN(torch.nn.Module):
    """Three-layer Chebyshev graph-convolution classifier (lighter variant).

    Three K=2 ``ChebConv`` layers (32, 32, 16 channels) with ReLU and
    dropout after each, followed by a three-layer MLP head.

    Args:
        edge_index: graph connectivity in COO format, shape ``(2, n_edges)``.
        edge_weight: per-edge weights, shape ``(n_edges,)``.
        n_timepoints: input feature length per node (default 50).
        n_classes: number of output classes (default 2).

    NOTE(review): ``edge_index``/``edge_weight`` are plain attributes, not
    registered buffers, so they are not serialized in ``state_dict`` and do
    not move with ``.to(device)``.
    """
    def __init__(self, edge_index, edge_weight, n_timepoints=50, n_classes=2):
        super().__init__()
        self.edge_index = edge_index
        self.edge_weight = edge_weight
        self.conv1 = tg.nn.ChebConv(in_channels=n_timepoints, out_channels=32, K=2, bias=True)
        self.conv2 = tg.nn.ChebConv(in_channels=32, out_channels=32, K=2, bias=True)
        self.conv3 = tg.nn.ChebConv(in_channels=32, out_channels=16, K=2, bias=True)
        # The flatten in forward() assumes exactly 512 nodes per sample -- TODO confirm.
        self.fc1 = nn.Linear(512 * 16, 256)
        self.fc2 = nn.Linear(256, 128)
        self.fc3 = nn.Linear(128, n_classes)
        self.dropout = nn.Dropout(0.2)

    def forward(self, x):
        """Apply the three graph convolutions, then the MLP head.

        Args:
            x: node features, presumably ``(n_nodes, n_timepoints)`` -- TODO confirm.

        Returns:
            Class logits of shape ``(batch, n_classes)``.
        """
        x = self.conv1(x, self.edge_index, self.edge_weight)
        x = F.relu(x)
        x = self.dropout(x)
        x = self.conv2(x, self.edge_index, self.edge_weight)
        x = F.relu(x)
        x = self.dropout(x)
        x = self.conv3(x, self.edge_index, self.edge_weight)
        x = F.relu(x)
        x = self.dropout(x)
        # Fix: build the batch vector on x's device (the old numpy round-trip
        # yielded a CPU tensor that breaks on GPU input).  batch = arange
        # makes the mean-pool an effective identity, kept for parity.
        x = tg.nn.global_mean_pool(x, torch.arange(x.size(0), device=x.device))
        x = x.view(-1, 512 * 16)
        x = self.fc1(x)
        x = self.dropout(x)
        x = self.fc2(x)
        x = self.dropout(x)
        x = self.fc3(x)
        return x
class STGCN(torch.nn.Module):
    """Spatio-temporal GCN skeleton.

    Currently identical in behavior to ``LoicGCN``: three K=2 ``ChebConv``
    layers (32, 32, 16 channels) plus an MLP head.  The temporal component
    (a ``torch_geometric_temporal`` ``STConv`` recurrent layer) was planned
    but is not wired in -- TODO add it before relying on the "ST" name.

    Args:
        edge_index: graph connectivity in COO format, shape ``(2, n_edges)``.
        edge_weight: per-edge weights, shape ``(n_edges,)``.
        n_timepoints: input feature length per node (default 50).
        n_classes: number of output classes (default 2).

    NOTE(review): ``edge_index``/``edge_weight`` are plain attributes, not
    registered buffers, so they are not serialized in ``state_dict`` and do
    not move with ``.to(device)``.
    """
    def __init__(self, edge_index, edge_weight, n_timepoints=50, n_classes=2):
        super().__init__()
        self.edge_index = edge_index
        self.edge_weight = edge_weight
        self.conv1 = tg.nn.ChebConv(in_channels=n_timepoints, out_channels=32, K=2, bias=True)
        self.conv2 = tg.nn.ChebConv(in_channels=32, out_channels=32, K=2, bias=True)
        self.conv3 = tg.nn.ChebConv(in_channels=32, out_channels=16, K=2, bias=True)
        # The flatten in forward() assumes exactly 512 nodes per sample -- TODO confirm.
        self.fc1 = nn.Linear(512 * 16, 256)
        self.fc2 = nn.Linear(256, 128)
        self.fc3 = nn.Linear(128, n_classes)
        self.dropout = nn.Dropout(0.2)

    def forward(self, x):
        """Apply the three graph convolutions, then the MLP head.

        Args:
            x: node features, presumably ``(n_nodes, n_timepoints)`` -- TODO confirm.

        Returns:
            Class logits of shape ``(batch, n_classes)``.
        """
        x = self.conv1(x, self.edge_index, self.edge_weight)
        x = F.relu(x)
        x = self.dropout(x)
        x = self.conv2(x, self.edge_index, self.edge_weight)
        x = F.relu(x)
        x = self.dropout(x)
        x = self.conv3(x, self.edge_index, self.edge_weight)
        x = F.relu(x)
        x = self.dropout(x)
        # Fix: build the batch vector on x's device (the old numpy round-trip
        # yielded a CPU tensor that breaks on GPU input).  batch = arange
        # makes the mean-pool an effective identity, kept for parity.
        x = tg.nn.global_mean_pool(x, torch.arange(x.size(0), device=x.device))
        x = x.view(-1, 512 * 16)
        x = self.fc1(x)
        x = self.dropout(x)
        x = self.fc2(x)
        x = self.dropout(x)
        x = self.fc3(x)
        return x
d0f959d540901abe9ed5a4a4894ce9ea340c7636 | 159,207 | py | Python | models1.py | whutinfo/web_develop | 0be75ed314b6524b55f83aedc85450f6c17d25c3 | [
"Unlicense"
] | null | null | null | models1.py | whutinfo/web_develop | 0be75ed314b6524b55f83aedc85450f6c17d25c3 | [
"Unlicense"
] | null | null | null | models1.py | whutinfo/web_develop | 0be75ed314b6524b55f83aedc85450f6c17d25c3 | [
"Unlicense"
] | null | null | null | # This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from django.db import models
class AccessTable(models.Model):
    """Unmanaged mapping of the legacy ``Access_Table``.

    Presumably one named access right per row, with create/update audit
    columns (timestamps plus acting user ids) -- verify against the code
    that writes this table.  Generated by ``inspectdb``: no primary key was
    detected, so Django's implicit ``id`` field applies, and the existing
    schema is left untouched (``managed = False``).
    """
    access_id = models.IntegerField(blank=True, null=True)  # logical id of the access right (nullable, not the PK)
    access_name = models.TextField(blank=True, null=True)
    create_time = models.DateTimeField(blank=True, null=True)
    create_uid = models.IntegerField(blank=True, null=True)  # presumably the creating user's id -- TODO confirm
    update_time = models.DateTimeField(blank=True, null=True)
    update_uid = models.IntegerField(blank=True, null=True)  # presumably the last updater's id -- TODO confirm
    class Meta:
        managed = False
        db_table = 'Access_Table'
class BaseTable(models.Model):
    """Unmanaged mapping of the legacy ``Base_Table``.

    Presumably one workflow-node definition per row (a title/type, a node
    id, and the functions to run before/at/after it), scoped to a
    department and role -- verify against the application logic.
    Generated by ``inspectdb``; Django does not manage the schema.
    """
    title = models.TextField(blank=True, null=True)
    type = models.IntegerField(blank=True, null=True)  # NOTE: shadows the builtin name; kept to match the db column
    node = models.IntegerField(blank=True, null=True)
    prefunc = models.IntegerField(blank=True, null=True)  # presumably the function id run before this node -- TODO confirm
    curfunc = models.IntegerField(blank=True, null=True)  # presumably the node's own function id -- TODO confirm
    afterfunc = models.IntegerField(blank=True, null=True)  # presumably the function id run after this node -- TODO confirm
    value1 = models.IntegerField(blank=True, null=True)
    value2 = models.IntegerField(blank=True, null=True)
    depart_id = models.IntegerField(blank=True, null=True)
    role_id = models.IntegerField(blank=True, null=True)
    class Meta:
        managed = False
        db_table = 'Base_Table'
class Data55Table(models.Model):
    """Unmanaged mapping of the legacy ``Data55_Table``.

    Fifty nullable free-text columns (``value1`` .. ``value50``) plus a
    ``data_id`` discriminator.  Column semantics are not visible here;
    presumably each row stores one dynamic form/record keyed by ``data_id``
    -- verify against the code that writes this table.  Generated by
    ``inspectdb``: no primary key was detected, so Django's implicit ``id``
    applies, and the schema is left untouched (``managed = False``).
    """
    data_id = models.IntegerField(blank=True, null=True)  # record discriminator -- TODO confirm
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)
    class Meta:
        managed = False
        db_table = 'Data55_Table'
class Data56Table(models.Model):
    """Unmanaged mapping of the legacy ``Data56_Table``.

    Same layout as ``Data55_Table``: a nullable ``data_id`` discriminator
    plus fifty opaque text columns, presumably one dynamic form/record per
    row -- verify against the writers of this table.  Generated by
    ``inspectdb``; Django's implicit ``id`` is the PK and the schema is
    not managed by migrations.
    """
    data_id = models.IntegerField(blank=True, null=True)  # record discriminator -- TODO confirm
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)
    class Meta:
        managed = False
        db_table = 'Data56_Table'
class Data57Table(models.Model):
    """Unmanaged mapping of the legacy ``Data57_Table``.

    Same layout as ``Data55_Table``: a nullable ``data_id`` discriminator
    plus fifty opaque text columns, presumably one dynamic form/record per
    row -- verify against the writers of this table.  Generated by
    ``inspectdb``; Django's implicit ``id`` is the PK and the schema is
    not managed by migrations.
    """
    data_id = models.IntegerField(blank=True, null=True)  # record discriminator -- TODO confirm
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)
    class Meta:
        managed = False
        db_table = 'Data57_Table'
class Data58Table(models.Model):
    """Unmanaged mapping of the legacy ``Data58_Table``.

    Same layout as ``Data55_Table``: a nullable ``data_id`` discriminator
    plus fifty opaque text columns, presumably one dynamic form/record per
    row -- verify against the writers of this table.  Generated by
    ``inspectdb``; Django's implicit ``id`` is the PK and the schema is
    not managed by migrations.
    """
    data_id = models.IntegerField(blank=True, null=True)  # record discriminator -- TODO confirm
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)
    class Meta:
        managed = False
        db_table = 'Data58_Table'
class Data59Table(models.Model):
    """Unmanaged mapping of the legacy ``Data59_Table``: a data id plus 50 generic text columns."""
    data_id = models.IntegerField(null=True, blank=True)
    value1 = models.TextField(null=True, blank=True)
    value2 = models.TextField(null=True, blank=True)
    value3 = models.TextField(null=True, blank=True)
    value4 = models.TextField(null=True, blank=True)
    value5 = models.TextField(null=True, blank=True)
    value6 = models.TextField(null=True, blank=True)
    value7 = models.TextField(null=True, blank=True)
    value8 = models.TextField(null=True, blank=True)
    value9 = models.TextField(null=True, blank=True)
    value10 = models.TextField(null=True, blank=True)
    value11 = models.TextField(null=True, blank=True)
    value12 = models.TextField(null=True, blank=True)
    value13 = models.TextField(null=True, blank=True)
    value14 = models.TextField(null=True, blank=True)
    value15 = models.TextField(null=True, blank=True)
    value16 = models.TextField(null=True, blank=True)
    value17 = models.TextField(null=True, blank=True)
    value18 = models.TextField(null=True, blank=True)
    value19 = models.TextField(null=True, blank=True)
    value20 = models.TextField(null=True, blank=True)
    value21 = models.TextField(null=True, blank=True)
    value22 = models.TextField(null=True, blank=True)
    value23 = models.TextField(null=True, blank=True)
    value24 = models.TextField(null=True, blank=True)
    value25 = models.TextField(null=True, blank=True)
    value26 = models.TextField(null=True, blank=True)
    value27 = models.TextField(null=True, blank=True)
    value28 = models.TextField(null=True, blank=True)
    value29 = models.TextField(null=True, blank=True)
    value30 = models.TextField(null=True, blank=True)
    value31 = models.TextField(null=True, blank=True)
    value32 = models.TextField(null=True, blank=True)
    value33 = models.TextField(null=True, blank=True)
    value34 = models.TextField(null=True, blank=True)
    value35 = models.TextField(null=True, blank=True)
    value36 = models.TextField(null=True, blank=True)
    value37 = models.TextField(null=True, blank=True)
    value38 = models.TextField(null=True, blank=True)
    value39 = models.TextField(null=True, blank=True)
    value40 = models.TextField(null=True, blank=True)
    value41 = models.TextField(null=True, blank=True)
    value42 = models.TextField(null=True, blank=True)
    value43 = models.TextField(null=True, blank=True)
    value44 = models.TextField(null=True, blank=True)
    value45 = models.TextField(null=True, blank=True)
    value46 = models.TextField(null=True, blank=True)
    value47 = models.TextField(null=True, blank=True)
    value48 = models.TextField(null=True, blank=True)
    value49 = models.TextField(null=True, blank=True)
    value50 = models.TextField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Data59_Table'
class Data60Table(models.Model):
    """Unmanaged mapping of the legacy ``Data60_Table``: a data id plus 50 generic text columns."""
    data_id = models.IntegerField(null=True, blank=True)
    value1 = models.TextField(null=True, blank=True)
    value2 = models.TextField(null=True, blank=True)
    value3 = models.TextField(null=True, blank=True)
    value4 = models.TextField(null=True, blank=True)
    value5 = models.TextField(null=True, blank=True)
    value6 = models.TextField(null=True, blank=True)
    value7 = models.TextField(null=True, blank=True)
    value8 = models.TextField(null=True, blank=True)
    value9 = models.TextField(null=True, blank=True)
    value10 = models.TextField(null=True, blank=True)
    value11 = models.TextField(null=True, blank=True)
    value12 = models.TextField(null=True, blank=True)
    value13 = models.TextField(null=True, blank=True)
    value14 = models.TextField(null=True, blank=True)
    value15 = models.TextField(null=True, blank=True)
    value16 = models.TextField(null=True, blank=True)
    value17 = models.TextField(null=True, blank=True)
    value18 = models.TextField(null=True, blank=True)
    value19 = models.TextField(null=True, blank=True)
    value20 = models.TextField(null=True, blank=True)
    value21 = models.TextField(null=True, blank=True)
    value22 = models.TextField(null=True, blank=True)
    value23 = models.TextField(null=True, blank=True)
    value24 = models.TextField(null=True, blank=True)
    value25 = models.TextField(null=True, blank=True)
    value26 = models.TextField(null=True, blank=True)
    value27 = models.TextField(null=True, blank=True)
    value28 = models.TextField(null=True, blank=True)
    value29 = models.TextField(null=True, blank=True)
    value30 = models.TextField(null=True, blank=True)
    value31 = models.TextField(null=True, blank=True)
    value32 = models.TextField(null=True, blank=True)
    value33 = models.TextField(null=True, blank=True)
    value34 = models.TextField(null=True, blank=True)
    value35 = models.TextField(null=True, blank=True)
    value36 = models.TextField(null=True, blank=True)
    value37 = models.TextField(null=True, blank=True)
    value38 = models.TextField(null=True, blank=True)
    value39 = models.TextField(null=True, blank=True)
    value40 = models.TextField(null=True, blank=True)
    value41 = models.TextField(null=True, blank=True)
    value42 = models.TextField(null=True, blank=True)
    value43 = models.TextField(null=True, blank=True)
    value44 = models.TextField(null=True, blank=True)
    value45 = models.TextField(null=True, blank=True)
    value46 = models.TextField(null=True, blank=True)
    value47 = models.TextField(null=True, blank=True)
    value48 = models.TextField(null=True, blank=True)
    value49 = models.TextField(null=True, blank=True)
    value50 = models.TextField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Data60_Table'
class Data61Table(models.Model):
    """Unmanaged mapping of the legacy ``Data61_Table``: a data id plus 50 generic text columns."""
    data_id = models.IntegerField(null=True, blank=True)
    value1 = models.TextField(null=True, blank=True)
    value2 = models.TextField(null=True, blank=True)
    value3 = models.TextField(null=True, blank=True)
    value4 = models.TextField(null=True, blank=True)
    value5 = models.TextField(null=True, blank=True)
    value6 = models.TextField(null=True, blank=True)
    value7 = models.TextField(null=True, blank=True)
    value8 = models.TextField(null=True, blank=True)
    value9 = models.TextField(null=True, blank=True)
    value10 = models.TextField(null=True, blank=True)
    value11 = models.TextField(null=True, blank=True)
    value12 = models.TextField(null=True, blank=True)
    value13 = models.TextField(null=True, blank=True)
    value14 = models.TextField(null=True, blank=True)
    value15 = models.TextField(null=True, blank=True)
    value16 = models.TextField(null=True, blank=True)
    value17 = models.TextField(null=True, blank=True)
    value18 = models.TextField(null=True, blank=True)
    value19 = models.TextField(null=True, blank=True)
    value20 = models.TextField(null=True, blank=True)
    value21 = models.TextField(null=True, blank=True)
    value22 = models.TextField(null=True, blank=True)
    value23 = models.TextField(null=True, blank=True)
    value24 = models.TextField(null=True, blank=True)
    value25 = models.TextField(null=True, blank=True)
    value26 = models.TextField(null=True, blank=True)
    value27 = models.TextField(null=True, blank=True)
    value28 = models.TextField(null=True, blank=True)
    value29 = models.TextField(null=True, blank=True)
    value30 = models.TextField(null=True, blank=True)
    value31 = models.TextField(null=True, blank=True)
    value32 = models.TextField(null=True, blank=True)
    value33 = models.TextField(null=True, blank=True)
    value34 = models.TextField(null=True, blank=True)
    value35 = models.TextField(null=True, blank=True)
    value36 = models.TextField(null=True, blank=True)
    value37 = models.TextField(null=True, blank=True)
    value38 = models.TextField(null=True, blank=True)
    value39 = models.TextField(null=True, blank=True)
    value40 = models.TextField(null=True, blank=True)
    value41 = models.TextField(null=True, blank=True)
    value42 = models.TextField(null=True, blank=True)
    value43 = models.TextField(null=True, blank=True)
    value44 = models.TextField(null=True, blank=True)
    value45 = models.TextField(null=True, blank=True)
    value46 = models.TextField(null=True, blank=True)
    value47 = models.TextField(null=True, blank=True)
    value48 = models.TextField(null=True, blank=True)
    value49 = models.TextField(null=True, blank=True)
    value50 = models.TextField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Data61_Table'
class Data62Table(models.Model):
    """Unmanaged mapping of the legacy ``Data62_Table``: a data id plus 50 generic text columns."""
    data_id = models.IntegerField(null=True, blank=True)
    value1 = models.TextField(null=True, blank=True)
    value2 = models.TextField(null=True, blank=True)
    value3 = models.TextField(null=True, blank=True)
    value4 = models.TextField(null=True, blank=True)
    value5 = models.TextField(null=True, blank=True)
    value6 = models.TextField(null=True, blank=True)
    value7 = models.TextField(null=True, blank=True)
    value8 = models.TextField(null=True, blank=True)
    value9 = models.TextField(null=True, blank=True)
    value10 = models.TextField(null=True, blank=True)
    value11 = models.TextField(null=True, blank=True)
    value12 = models.TextField(null=True, blank=True)
    value13 = models.TextField(null=True, blank=True)
    value14 = models.TextField(null=True, blank=True)
    value15 = models.TextField(null=True, blank=True)
    value16 = models.TextField(null=True, blank=True)
    value17 = models.TextField(null=True, blank=True)
    value18 = models.TextField(null=True, blank=True)
    value19 = models.TextField(null=True, blank=True)
    value20 = models.TextField(null=True, blank=True)
    value21 = models.TextField(null=True, blank=True)
    value22 = models.TextField(null=True, blank=True)
    value23 = models.TextField(null=True, blank=True)
    value24 = models.TextField(null=True, blank=True)
    value25 = models.TextField(null=True, blank=True)
    value26 = models.TextField(null=True, blank=True)
    value27 = models.TextField(null=True, blank=True)
    value28 = models.TextField(null=True, blank=True)
    value29 = models.TextField(null=True, blank=True)
    value30 = models.TextField(null=True, blank=True)
    value31 = models.TextField(null=True, blank=True)
    value32 = models.TextField(null=True, blank=True)
    value33 = models.TextField(null=True, blank=True)
    value34 = models.TextField(null=True, blank=True)
    value35 = models.TextField(null=True, blank=True)
    value36 = models.TextField(null=True, blank=True)
    value37 = models.TextField(null=True, blank=True)
    value38 = models.TextField(null=True, blank=True)
    value39 = models.TextField(null=True, blank=True)
    value40 = models.TextField(null=True, blank=True)
    value41 = models.TextField(null=True, blank=True)
    value42 = models.TextField(null=True, blank=True)
    value43 = models.TextField(null=True, blank=True)
    value44 = models.TextField(null=True, blank=True)
    value45 = models.TextField(null=True, blank=True)
    value46 = models.TextField(null=True, blank=True)
    value47 = models.TextField(null=True, blank=True)
    value48 = models.TextField(null=True, blank=True)
    value49 = models.TextField(null=True, blank=True)
    value50 = models.TextField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Data62_Table'
class Data63Table(models.Model):
    """Unmanaged mapping of the legacy ``Data63_Table``: a data id plus 50 generic text columns."""
    data_id = models.IntegerField(null=True, blank=True)
    value1 = models.TextField(null=True, blank=True)
    value2 = models.TextField(null=True, blank=True)
    value3 = models.TextField(null=True, blank=True)
    value4 = models.TextField(null=True, blank=True)
    value5 = models.TextField(null=True, blank=True)
    value6 = models.TextField(null=True, blank=True)
    value7 = models.TextField(null=True, blank=True)
    value8 = models.TextField(null=True, blank=True)
    value9 = models.TextField(null=True, blank=True)
    value10 = models.TextField(null=True, blank=True)
    value11 = models.TextField(null=True, blank=True)
    value12 = models.TextField(null=True, blank=True)
    value13 = models.TextField(null=True, blank=True)
    value14 = models.TextField(null=True, blank=True)
    value15 = models.TextField(null=True, blank=True)
    value16 = models.TextField(null=True, blank=True)
    value17 = models.TextField(null=True, blank=True)
    value18 = models.TextField(null=True, blank=True)
    value19 = models.TextField(null=True, blank=True)
    value20 = models.TextField(null=True, blank=True)
    value21 = models.TextField(null=True, blank=True)
    value22 = models.TextField(null=True, blank=True)
    value23 = models.TextField(null=True, blank=True)
    value24 = models.TextField(null=True, blank=True)
    value25 = models.TextField(null=True, blank=True)
    value26 = models.TextField(null=True, blank=True)
    value27 = models.TextField(null=True, blank=True)
    value28 = models.TextField(null=True, blank=True)
    value29 = models.TextField(null=True, blank=True)
    value30 = models.TextField(null=True, blank=True)
    value31 = models.TextField(null=True, blank=True)
    value32 = models.TextField(null=True, blank=True)
    value33 = models.TextField(null=True, blank=True)
    value34 = models.TextField(null=True, blank=True)
    value35 = models.TextField(null=True, blank=True)
    value36 = models.TextField(null=True, blank=True)
    value37 = models.TextField(null=True, blank=True)
    value38 = models.TextField(null=True, blank=True)
    value39 = models.TextField(null=True, blank=True)
    value40 = models.TextField(null=True, blank=True)
    value41 = models.TextField(null=True, blank=True)
    value42 = models.TextField(null=True, blank=True)
    value43 = models.TextField(null=True, blank=True)
    value44 = models.TextField(null=True, blank=True)
    value45 = models.TextField(null=True, blank=True)
    value46 = models.TextField(null=True, blank=True)
    value47 = models.TextField(null=True, blank=True)
    value48 = models.TextField(null=True, blank=True)
    value49 = models.TextField(null=True, blank=True)
    value50 = models.TextField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Data63_Table'
class Data64Table(models.Model):
    """Unmanaged mapping of the legacy ``Data64_Table``: a data id plus 50 generic text columns."""
    data_id = models.IntegerField(null=True, blank=True)
    value1 = models.TextField(null=True, blank=True)
    value2 = models.TextField(null=True, blank=True)
    value3 = models.TextField(null=True, blank=True)
    value4 = models.TextField(null=True, blank=True)
    value5 = models.TextField(null=True, blank=True)
    value6 = models.TextField(null=True, blank=True)
    value7 = models.TextField(null=True, blank=True)
    value8 = models.TextField(null=True, blank=True)
    value9 = models.TextField(null=True, blank=True)
    value10 = models.TextField(null=True, blank=True)
    value11 = models.TextField(null=True, blank=True)
    value12 = models.TextField(null=True, blank=True)
    value13 = models.TextField(null=True, blank=True)
    value14 = models.TextField(null=True, blank=True)
    value15 = models.TextField(null=True, blank=True)
    value16 = models.TextField(null=True, blank=True)
    value17 = models.TextField(null=True, blank=True)
    value18 = models.TextField(null=True, blank=True)
    value19 = models.TextField(null=True, blank=True)
    value20 = models.TextField(null=True, blank=True)
    value21 = models.TextField(null=True, blank=True)
    value22 = models.TextField(null=True, blank=True)
    value23 = models.TextField(null=True, blank=True)
    value24 = models.TextField(null=True, blank=True)
    value25 = models.TextField(null=True, blank=True)
    value26 = models.TextField(null=True, blank=True)
    value27 = models.TextField(null=True, blank=True)
    value28 = models.TextField(null=True, blank=True)
    value29 = models.TextField(null=True, blank=True)
    value30 = models.TextField(null=True, blank=True)
    value31 = models.TextField(null=True, blank=True)
    value32 = models.TextField(null=True, blank=True)
    value33 = models.TextField(null=True, blank=True)
    value34 = models.TextField(null=True, blank=True)
    value35 = models.TextField(null=True, blank=True)
    value36 = models.TextField(null=True, blank=True)
    value37 = models.TextField(null=True, blank=True)
    value38 = models.TextField(null=True, blank=True)
    value39 = models.TextField(null=True, blank=True)
    value40 = models.TextField(null=True, blank=True)
    value41 = models.TextField(null=True, blank=True)
    value42 = models.TextField(null=True, blank=True)
    value43 = models.TextField(null=True, blank=True)
    value44 = models.TextField(null=True, blank=True)
    value45 = models.TextField(null=True, blank=True)
    value46 = models.TextField(null=True, blank=True)
    value47 = models.TextField(null=True, blank=True)
    value48 = models.TextField(null=True, blank=True)
    value49 = models.TextField(null=True, blank=True)
    value50 = models.TextField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Data64_Table'
class Data66Table(models.Model):
    """Unmanaged mapping of the legacy ``Data66_Table``: a data id plus 50 generic text columns."""
    data_id = models.IntegerField(null=True, blank=True)
    value1 = models.TextField(null=True, blank=True)
    value2 = models.TextField(null=True, blank=True)
    value3 = models.TextField(null=True, blank=True)
    value4 = models.TextField(null=True, blank=True)
    value5 = models.TextField(null=True, blank=True)
    value6 = models.TextField(null=True, blank=True)
    value7 = models.TextField(null=True, blank=True)
    value8 = models.TextField(null=True, blank=True)
    value9 = models.TextField(null=True, blank=True)
    value10 = models.TextField(null=True, blank=True)
    value11 = models.TextField(null=True, blank=True)
    value12 = models.TextField(null=True, blank=True)
    value13 = models.TextField(null=True, blank=True)
    value14 = models.TextField(null=True, blank=True)
    value15 = models.TextField(null=True, blank=True)
    value16 = models.TextField(null=True, blank=True)
    value17 = models.TextField(null=True, blank=True)
    value18 = models.TextField(null=True, blank=True)
    value19 = models.TextField(null=True, blank=True)
    value20 = models.TextField(null=True, blank=True)
    value21 = models.TextField(null=True, blank=True)
    value22 = models.TextField(null=True, blank=True)
    value23 = models.TextField(null=True, blank=True)
    value24 = models.TextField(null=True, blank=True)
    value25 = models.TextField(null=True, blank=True)
    value26 = models.TextField(null=True, blank=True)
    value27 = models.TextField(null=True, blank=True)
    value28 = models.TextField(null=True, blank=True)
    value29 = models.TextField(null=True, blank=True)
    value30 = models.TextField(null=True, blank=True)
    value31 = models.TextField(null=True, blank=True)
    value32 = models.TextField(null=True, blank=True)
    value33 = models.TextField(null=True, blank=True)
    value34 = models.TextField(null=True, blank=True)
    value35 = models.TextField(null=True, blank=True)
    value36 = models.TextField(null=True, blank=True)
    value37 = models.TextField(null=True, blank=True)
    value38 = models.TextField(null=True, blank=True)
    value39 = models.TextField(null=True, blank=True)
    value40 = models.TextField(null=True, blank=True)
    value41 = models.TextField(null=True, blank=True)
    value42 = models.TextField(null=True, blank=True)
    value43 = models.TextField(null=True, blank=True)
    value44 = models.TextField(null=True, blank=True)
    value45 = models.TextField(null=True, blank=True)
    value46 = models.TextField(null=True, blank=True)
    value47 = models.TextField(null=True, blank=True)
    value48 = models.TextField(null=True, blank=True)
    value49 = models.TextField(null=True, blank=True)
    value50 = models.TextField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Data66_Table'
class Data67Table(models.Model):
    """Unmanaged mapping of the legacy ``Data67_Table``: a data id plus 50 generic text columns."""
    data_id = models.IntegerField(null=True, blank=True)
    value1 = models.TextField(null=True, blank=True)
    value2 = models.TextField(null=True, blank=True)
    value3 = models.TextField(null=True, blank=True)
    value4 = models.TextField(null=True, blank=True)
    value5 = models.TextField(null=True, blank=True)
    value6 = models.TextField(null=True, blank=True)
    value7 = models.TextField(null=True, blank=True)
    value8 = models.TextField(null=True, blank=True)
    value9 = models.TextField(null=True, blank=True)
    value10 = models.TextField(null=True, blank=True)
    value11 = models.TextField(null=True, blank=True)
    value12 = models.TextField(null=True, blank=True)
    value13 = models.TextField(null=True, blank=True)
    value14 = models.TextField(null=True, blank=True)
    value15 = models.TextField(null=True, blank=True)
    value16 = models.TextField(null=True, blank=True)
    value17 = models.TextField(null=True, blank=True)
    value18 = models.TextField(null=True, blank=True)
    value19 = models.TextField(null=True, blank=True)
    value20 = models.TextField(null=True, blank=True)
    value21 = models.TextField(null=True, blank=True)
    value22 = models.TextField(null=True, blank=True)
    value23 = models.TextField(null=True, blank=True)
    value24 = models.TextField(null=True, blank=True)
    value25 = models.TextField(null=True, blank=True)
    value26 = models.TextField(null=True, blank=True)
    value27 = models.TextField(null=True, blank=True)
    value28 = models.TextField(null=True, blank=True)
    value29 = models.TextField(null=True, blank=True)
    value30 = models.TextField(null=True, blank=True)
    value31 = models.TextField(null=True, blank=True)
    value32 = models.TextField(null=True, blank=True)
    value33 = models.TextField(null=True, blank=True)
    value34 = models.TextField(null=True, blank=True)
    value35 = models.TextField(null=True, blank=True)
    value36 = models.TextField(null=True, blank=True)
    value37 = models.TextField(null=True, blank=True)
    value38 = models.TextField(null=True, blank=True)
    value39 = models.TextField(null=True, blank=True)
    value40 = models.TextField(null=True, blank=True)
    value41 = models.TextField(null=True, blank=True)
    value42 = models.TextField(null=True, blank=True)
    value43 = models.TextField(null=True, blank=True)
    value44 = models.TextField(null=True, blank=True)
    value45 = models.TextField(null=True, blank=True)
    value46 = models.TextField(null=True, blank=True)
    value47 = models.TextField(null=True, blank=True)
    value48 = models.TextField(null=True, blank=True)
    value49 = models.TextField(null=True, blank=True)
    value50 = models.TextField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Data67_Table'
class Data69Table(models.Model):
    """Unmanaged mapping of the legacy ``Data69_Table``: a data id plus 50 generic text columns."""
    data_id = models.IntegerField(null=True, blank=True)
    value1 = models.TextField(null=True, blank=True)
    value2 = models.TextField(null=True, blank=True)
    value3 = models.TextField(null=True, blank=True)
    value4 = models.TextField(null=True, blank=True)
    value5 = models.TextField(null=True, blank=True)
    value6 = models.TextField(null=True, blank=True)
    value7 = models.TextField(null=True, blank=True)
    value8 = models.TextField(null=True, blank=True)
    value9 = models.TextField(null=True, blank=True)
    value10 = models.TextField(null=True, blank=True)
    value11 = models.TextField(null=True, blank=True)
    value12 = models.TextField(null=True, blank=True)
    value13 = models.TextField(null=True, blank=True)
    value14 = models.TextField(null=True, blank=True)
    value15 = models.TextField(null=True, blank=True)
    value16 = models.TextField(null=True, blank=True)
    value17 = models.TextField(null=True, blank=True)
    value18 = models.TextField(null=True, blank=True)
    value19 = models.TextField(null=True, blank=True)
    value20 = models.TextField(null=True, blank=True)
    value21 = models.TextField(null=True, blank=True)
    value22 = models.TextField(null=True, blank=True)
    value23 = models.TextField(null=True, blank=True)
    value24 = models.TextField(null=True, blank=True)
    value25 = models.TextField(null=True, blank=True)
    value26 = models.TextField(null=True, blank=True)
    value27 = models.TextField(null=True, blank=True)
    value28 = models.TextField(null=True, blank=True)
    value29 = models.TextField(null=True, blank=True)
    value30 = models.TextField(null=True, blank=True)
    value31 = models.TextField(null=True, blank=True)
    value32 = models.TextField(null=True, blank=True)
    value33 = models.TextField(null=True, blank=True)
    value34 = models.TextField(null=True, blank=True)
    value35 = models.TextField(null=True, blank=True)
    value36 = models.TextField(null=True, blank=True)
    value37 = models.TextField(null=True, blank=True)
    value38 = models.TextField(null=True, blank=True)
    value39 = models.TextField(null=True, blank=True)
    value40 = models.TextField(null=True, blank=True)
    value41 = models.TextField(null=True, blank=True)
    value42 = models.TextField(null=True, blank=True)
    value43 = models.TextField(null=True, blank=True)
    value44 = models.TextField(null=True, blank=True)
    value45 = models.TextField(null=True, blank=True)
    value46 = models.TextField(null=True, blank=True)
    value47 = models.TextField(null=True, blank=True)
    value48 = models.TextField(null=True, blank=True)
    value49 = models.TextField(null=True, blank=True)
    value50 = models.TextField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Data69_Table'
class Data70Table(models.Model):
    """Unmanaged mapping of the legacy ``Data70_Table``: a data id plus 50 generic text columns."""
    data_id = models.IntegerField(null=True, blank=True)
    value1 = models.TextField(null=True, blank=True)
    value2 = models.TextField(null=True, blank=True)
    value3 = models.TextField(null=True, blank=True)
    value4 = models.TextField(null=True, blank=True)
    value5 = models.TextField(null=True, blank=True)
    value6 = models.TextField(null=True, blank=True)
    value7 = models.TextField(null=True, blank=True)
    value8 = models.TextField(null=True, blank=True)
    value9 = models.TextField(null=True, blank=True)
    value10 = models.TextField(null=True, blank=True)
    value11 = models.TextField(null=True, blank=True)
    value12 = models.TextField(null=True, blank=True)
    value13 = models.TextField(null=True, blank=True)
    value14 = models.TextField(null=True, blank=True)
    value15 = models.TextField(null=True, blank=True)
    value16 = models.TextField(null=True, blank=True)
    value17 = models.TextField(null=True, blank=True)
    value18 = models.TextField(null=True, blank=True)
    value19 = models.TextField(null=True, blank=True)
    value20 = models.TextField(null=True, blank=True)
    value21 = models.TextField(null=True, blank=True)
    value22 = models.TextField(null=True, blank=True)
    value23 = models.TextField(null=True, blank=True)
    value24 = models.TextField(null=True, blank=True)
    value25 = models.TextField(null=True, blank=True)
    value26 = models.TextField(null=True, blank=True)
    value27 = models.TextField(null=True, blank=True)
    value28 = models.TextField(null=True, blank=True)
    value29 = models.TextField(null=True, blank=True)
    value30 = models.TextField(null=True, blank=True)
    value31 = models.TextField(null=True, blank=True)
    value32 = models.TextField(null=True, blank=True)
    value33 = models.TextField(null=True, blank=True)
    value34 = models.TextField(null=True, blank=True)
    value35 = models.TextField(null=True, blank=True)
    value36 = models.TextField(null=True, blank=True)
    value37 = models.TextField(null=True, blank=True)
    value38 = models.TextField(null=True, blank=True)
    value39 = models.TextField(null=True, blank=True)
    value40 = models.TextField(null=True, blank=True)
    value41 = models.TextField(null=True, blank=True)
    value42 = models.TextField(null=True, blank=True)
    value43 = models.TextField(null=True, blank=True)
    value44 = models.TextField(null=True, blank=True)
    value45 = models.TextField(null=True, blank=True)
    value46 = models.TextField(null=True, blank=True)
    value47 = models.TextField(null=True, blank=True)
    value48 = models.TextField(null=True, blank=True)
    value49 = models.TextField(null=True, blank=True)
    value50 = models.TextField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Data70_Table'
class Data72Table(models.Model):
    """Unmanaged mapping of the legacy ``Data72_Table``: a data id plus 50 generic text columns."""
    data_id = models.IntegerField(null=True, blank=True)
    value1 = models.TextField(null=True, blank=True)
    value2 = models.TextField(null=True, blank=True)
    value3 = models.TextField(null=True, blank=True)
    value4 = models.TextField(null=True, blank=True)
    value5 = models.TextField(null=True, blank=True)
    value6 = models.TextField(null=True, blank=True)
    value7 = models.TextField(null=True, blank=True)
    value8 = models.TextField(null=True, blank=True)
    value9 = models.TextField(null=True, blank=True)
    value10 = models.TextField(null=True, blank=True)
    value11 = models.TextField(null=True, blank=True)
    value12 = models.TextField(null=True, blank=True)
    value13 = models.TextField(null=True, blank=True)
    value14 = models.TextField(null=True, blank=True)
    value15 = models.TextField(null=True, blank=True)
    value16 = models.TextField(null=True, blank=True)
    value17 = models.TextField(null=True, blank=True)
    value18 = models.TextField(null=True, blank=True)
    value19 = models.TextField(null=True, blank=True)
    value20 = models.TextField(null=True, blank=True)
    value21 = models.TextField(null=True, blank=True)
    value22 = models.TextField(null=True, blank=True)
    value23 = models.TextField(null=True, blank=True)
    value24 = models.TextField(null=True, blank=True)
    value25 = models.TextField(null=True, blank=True)
    value26 = models.TextField(null=True, blank=True)
    value27 = models.TextField(null=True, blank=True)
    value28 = models.TextField(null=True, blank=True)
    value29 = models.TextField(null=True, blank=True)
    value30 = models.TextField(null=True, blank=True)
    value31 = models.TextField(null=True, blank=True)
    value32 = models.TextField(null=True, blank=True)
    value33 = models.TextField(null=True, blank=True)
    value34 = models.TextField(null=True, blank=True)
    value35 = models.TextField(null=True, blank=True)
    value36 = models.TextField(null=True, blank=True)
    value37 = models.TextField(null=True, blank=True)
    value38 = models.TextField(null=True, blank=True)
    value39 = models.TextField(null=True, blank=True)
    value40 = models.TextField(null=True, blank=True)
    value41 = models.TextField(null=True, blank=True)
    value42 = models.TextField(null=True, blank=True)
    value43 = models.TextField(null=True, blank=True)
    value44 = models.TextField(null=True, blank=True)
    value45 = models.TextField(null=True, blank=True)
    value46 = models.TextField(null=True, blank=True)
    value47 = models.TextField(null=True, blank=True)
    value48 = models.TextField(null=True, blank=True)
    value49 = models.TextField(null=True, blank=True)
    value50 = models.TextField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Data72_Table'
class DepartroleTable(models.Model):
    """Unmanaged model for ``DepartRole_Table``: department/role association rows with audit fields."""
    dr_id = models.IntegerField(null=True, blank=True)
    depart_id = models.IntegerField(null=True, blank=True)
    role_id = models.IntegerField(null=True, blank=True)
    # Audit columns: who created/updated the row and when.
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.DateTimeField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'DepartRole_Table'
class DepartuserTable(models.Model):
    """Unmanaged model for ``DepartUser_Table``: department/user association rows with audit fields."""
    dr_id = models.IntegerField(null=True, blank=True)
    depart_id = models.IntegerField(null=True, blank=True)
    user_id = models.IntegerField(null=True, blank=True)
    # Audit columns: who created/updated the row and when.
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.DateTimeField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'DepartUser_Table'
class DepartmentTable(models.Model):
    """Unmanaged model for ``Department_Table``: department records with audit fields."""
    depart_id = models.IntegerField(null=True, blank=True)
    name = models.TextField(null=True, blank=True)
    # `node` semantics are not visible here (possibly tree/hierarchy data) — confirm against callers.
    node = models.TextField(null=True, blank=True)
    # Audit columns: who created/updated the row and when.
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.DateTimeField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Department_Table'
class DepotinfoTable(models.Model):
    """Unmanaged model for ``Depotinfo_Table``: depot/stock operation records keyed by RFID and data id."""
    depotinfo_id = models.IntegerField(null=True, blank=True)
    depotinfo_style = models.TextField(null=True, blank=True)
    d_id = models.IntegerField(null=True, blank=True)
    d_name = models.TextField(null=True, blank=True)
    data_id = models.IntegerField(null=True, blank=True)
    rfid = models.TextField(null=True, blank=True)
    stock_id = models.IntegerField(null=True, blank=True)
    stock_name = models.TextField(null=True, blank=True)
    # NOTE(review): opt_time is stored as text, unlike the DateTimeField audit
    # columns used elsewhere in this file — presumably a legacy schema quirk.
    opt_time = models.TextField(null=True, blank=True)
    opt_uid = models.IntegerField(null=True, blank=True)
    opt_uname = models.TextField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Depotinfo_Table'
class DictionaryTable(models.Model):
    """Unmanaged model for ``Dictionary_Table``: named dictionary entries tied to a module, with audit fields."""
    d_id = models.IntegerField(null=True, blank=True)
    d_name = models.TextField(null=True, blank=True)
    d_describe = models.TextField(null=True, blank=True)
    module_id = models.IntegerField(null=True, blank=True)
    # Audit columns: who created/updated the row and when.
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.DateTimeField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    other_id = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Dictionary_Table'
class ErrorRecordTable(models.Model):
    """Unmanaged model for ``Error_Record_Table``: detailed error records (state, cause, free-text record)."""
    record_id = models.IntegerField(null=True, blank=True)
    style = models.IntegerField(null=True, blank=True)
    rfid = models.TextField(null=True, blank=True)
    d_id = models.IntegerField(null=True, blank=True)
    data_id = models.IntegerField(null=True, blank=True)
    gettime = models.DateTimeField(null=True, blank=True)
    rfid_id = models.IntegerField(null=True, blank=True)
    state = models.TextField(null=True, blank=True)
    cause = models.TextField(null=True, blank=True)
    record = models.TextField(null=True, blank=True)
    remark = models.TextField(null=True, blank=True)

    class Meta:
        managed = False  # table is created and maintained outside Django
        db_table = 'Error_Record_Table'
class ErrorTable(models.Model):
    """Unmanaged mapping of the legacy 'Error_Table' RFID error table."""
    error_id = models.IntegerField(null=True, blank=True)
    style = models.IntegerField(null=True, blank=True)
    rfid = models.TextField(null=True, blank=True)
    d_id = models.IntegerField(null=True, blank=True)
    data_id = models.IntegerField(null=True, blank=True)
    gettime = models.DateTimeField(null=True, blank=True)
    rfid_id = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Error_Table'
class Exception(models.Model):
    """Unmanaged mapping of the legacy 'Exception' log table.

    NOTE(review): the class name shadows the builtin ``Exception`` inside this
    module. It is kept unchanged because external code may import it by this
    name; prefer importing it under an alias at call sites.
    """
    # inspectdb emitted ``id`` without primary_key=True; Django rejects a
    # non-primary-key field named ``id`` (system check models.E004), so the
    # column is marked as the primary key explicitly.
    id = models.IntegerField(primary_key=True)
    style = models.CharField(max_length=50, blank=True, null=True)
    method = models.CharField(max_length=50, blank=True, null=True)
    parameter = models.CharField(max_length=50, blank=True, null=True)
    explain = models.CharField(max_length=50, blank=True, null=True)
    etime = models.CharField(max_length=50, blank=True, null=True)
    remark = models.CharField(max_length=50, blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'Exception'
class Field55Table(models.Model):
    """Unmanaged mapping of the legacy 'Field55_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field55_Table'


class Field56Table(models.Model):
    """Unmanaged mapping of the legacy 'Field56_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field56_Table'


class Field57Table(models.Model):
    """Unmanaged mapping of the legacy 'Field57_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field57_Table'


class Field58Table(models.Model):
    """Unmanaged mapping of the legacy 'Field58_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field58_Table'


class Field59Table(models.Model):
    """Unmanaged mapping of the legacy 'Field59_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field59_Table'


class Field60Table(models.Model):
    """Unmanaged mapping of the legacy 'Field60_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field60_Table'


class Field61Table(models.Model):
    """Unmanaged mapping of the legacy 'Field61_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field61_Table'


class Field62Table(models.Model):
    """Unmanaged mapping of the legacy 'Field62_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field62_Table'


class Field63Table(models.Model):
    """Unmanaged mapping of the legacy 'Field63_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field63_Table'


class Field64Table(models.Model):
    """Unmanaged mapping of the legacy 'Field64_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field64_Table'


class Field66Table(models.Model):
    """Unmanaged mapping of the legacy 'Field66_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field66_Table'


class Field67Table(models.Model):
    """Unmanaged mapping of the legacy 'Field67_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field67_Table'


class Field69Table(models.Model):
    """Unmanaged mapping of the legacy 'Field69_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field69_Table'


class Field70Table(models.Model):
    """Unmanaged mapping of the legacy 'Field70_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field70_Table'


class Field72Table(models.Model):
    """Unmanaged mapping of the legacy 'Field72_Table' field-definition table."""
    field_id = models.IntegerField(null=True, blank=True)
    field_name = models.TextField(null=True, blank=True)
    field_describe = models.TextField(null=True, blank=True)
    field_default = models.TextField(null=True, blank=True)
    field_type = models.TextField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Field72_Table'
class GoodsinfoTable(models.Model):
    """Unmanaged mapping of the legacy 'GoodsInfo_Table' stock-detail table."""
    # inspectdb emitted ``id`` without primary_key=True; Django rejects a
    # non-primary-key field named ``id`` (system check models.E004), so the
    # column is marked as the primary key explicitly.
    id = models.IntegerField(primary_key=True)
    stockin_id = models.IntegerField(db_column='stockIn_id', blank=True, null=True)  # Field name made lowercase.
    goodsid = models.IntegerField(db_column='goodsId', blank=True, null=True)  # Field name made lowercase.
    catid = models.TextField(db_column='catId', blank=True, null=True)  # Field name made lowercase.
    goodssn = models.TextField(db_column='goodsSn', blank=True, null=True)  # Field name made lowercase.
    goodsstock = models.IntegerField(db_column='goodsStock', blank=True, null=True)  # Field name made lowercase.
    salecount = models.IntegerField(db_column='saleCount', blank=True, null=True)  # Field name made lowercase.
    cost = models.FloatField(blank=True, null=True)
    price = models.FloatField(blank=True, null=True)
    exp_date = models.TextField(blank=True, null=True)
    manufactorid = models.IntegerField(db_column='manufactorId', blank=True, null=True)  # Field name made lowercase.
    supplierid = models.IntegerField(db_column='supplierId', blank=True, null=True)  # Field name made lowercase.
    shopid = models.IntegerField(db_column='shopId', blank=True, null=True)  # Field name made lowercase.
    out_time = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'GoodsInfo_Table'
class GoodsTable(models.Model):
    """Unmanaged mapping of the legacy 'Goods_Table' goods catalogue."""
    goodsid = models.IntegerField(db_column='goodsId', null=True, blank=True)
    goodsname = models.TextField(db_column='goodsName', null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Goods_Table'
class InfoTable(models.Model):
    """Unmanaged mapping of the legacy 'Info_Table' metadata table."""
    info_id = models.IntegerField(null=True, blank=True)
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.DateTimeField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)
    enable = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Info_Table'
class KhcontrolTable(models.Model):
    """Unmanaged mapping of the legacy 'KHcontrol_Table' customer table."""
    # inspectdb emitted ``id`` without primary_key=True; Django rejects a
    # non-primary-key field named ``id`` (system check models.E004), so the
    # column is marked as the primary key explicitly.
    id = models.IntegerField(primary_key=True)
    name = models.CharField(max_length=50, blank=True, null=True)
    birthday = models.TextField(blank=True, null=True)
    sex = models.CharField(max_length=50, blank=True, null=True)
    phone = models.CharField(max_length=50, blank=True, null=True)
    carid = models.TextField(db_column='carId', blank=True, null=True)  # Field name made lowercase.
    point = models.CharField(max_length=50, blank=True, null=True)
    create_time = models.TextField(blank=True, null=True)
    update_time = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'KHcontrol_Table'
class ManufactorTable(models.Model):
    """Unmanaged mapping of the legacy 'Manufactor_Table' manufacturer table."""
    manufactorid = models.IntegerField(db_column='manufactorId', null=True, blank=True)
    name = models.TextField(null=True, blank=True)
    managername = models.TextField(db_column='managerName', null=True, blank=True)
    phone = models.CharField(null=True, blank=True, max_length=50)
    address = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Manufactor_Table'
class MapmenuTable(models.Model):
    """Unmanaged mapping of the legacy 'MapMenu_Table' map-menu table."""
    map_id = models.IntegerField(null=True, blank=True)
    node = models.CharField(null=True, blank=True, max_length=50)
    name = models.CharField(null=True, blank=True, max_length=50)
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.DateTimeField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'MapMenu_Table'
class MappointTable(models.Model):
    """Unmanaged mapping of the legacy 'MapPoint_Table' map-point table."""
    point_id = models.IntegerField(null=True, blank=True)
    map_id = models.IntegerField(null=True, blank=True)
    px = models.TextField(null=True, blank=True)
    py = models.TextField(null=True, blank=True)
    msg1 = models.TextField(null=True, blank=True)
    msg2 = models.TextField(null=True, blank=True)
    msg3 = models.TextField(null=True, blank=True)
    video_id = models.IntegerField(null=True, blank=True)
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.DateTimeField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'MapPoint_Table'
class MenuTable(models.Model):
    """Unmanaged mapping of the legacy 'Menu_Table' navigation-menu table."""
    menu_id = models.IntegerField(null=True, blank=True)
    menu_name = models.TextField(null=True, blank=True)
    menu_describe = models.TextField(null=True, blank=True)
    menu_align = models.TextField(null=True, blank=True)
    menu_url = models.TextField(null=True, blank=True)
    menu_img = models.TextField(null=True, blank=True)
    menu_show = models.IntegerField(null=True, blank=True)
    menu_open = models.IntegerField(null=True, blank=True)
    menu_node = models.TextField(null=True, blank=True)
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.DateTimeField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Menu_Table'
class MokuaiTable(models.Model):
    """Unmanaged mapping of the legacy 'MoKuai_Table' module table."""
    # inspectdb emitted ``id`` without primary_key=True; Django rejects a
    # non-primary-key field named ``id`` (system check models.E004), so the
    # column is marked as the primary key explicitly.
    id = models.IntegerField(primary_key=True)
    mokuai_name = models.TextField(blank=True, null=True)
    mokuai_describe = models.TextField(blank=True, null=True)
    mokuai_node = models.TextField(blank=True, null=True)
    create_time = models.DateTimeField(blank=True, null=True)
    create_uid = models.IntegerField(blank=True, null=True)
    update_time = models.DateTimeField(blank=True, null=True)
    update_uid = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'MoKuai_Table'
class ModuleTable(models.Model):
    """Unmanaged mapping of the legacy 'Module_Table' module table."""
    module_id = models.IntegerField(null=True, blank=True)
    module_name = models.TextField(null=True, blank=True)
    module_describe = models.TextField(null=True, blank=True)
    module_node = models.TextField(null=True, blank=True)
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.DateTimeField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Module_Table'
class NodeTable(models.Model):
    """Unmanaged mapping of the legacy 'Node_Table' tree-node table."""
    node_id = models.IntegerField(null=True, blank=True)
    name = models.TextField(null=True, blank=True)
    node = models.TextField(null=True, blank=True)
    maxchild = models.TextField(null=True, blank=True)
    tree_id = models.IntegerField(null=True, blank=True)
    info_id = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Node_Table'
class ObjectTable(models.Model):
    """Unmanaged mapping of the legacy 'Object_Table' object registry."""
    object_id = models.IntegerField(null=True, blank=True)
    obj_name = models.TextField(null=True, blank=True)
    obj_tableid = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Object_Table'
class PatchTable(models.Model):
    """Unmanaged mapping of the legacy 'Patch_Table' workflow-patch table."""
    suoyin = models.IntegerField(null=True, blank=True)
    # ``type`` mirrors the column name; it shadows the builtin only as an attribute.
    type = models.IntegerField(null=True, blank=True)
    step = models.IntegerField(null=True, blank=True)
    flag = models.IntegerField(null=True, blank=True)
    text = models.TextField(null=True, blank=True)
    position = models.TextField(null=True, blank=True)
    create_time = models.TextField(null=True, blank=True)
    user_id = models.IntegerField(null=True, blank=True)
    performer1 = models.IntegerField(null=True, blank=True)
    performer2 = models.IntegerField(null=True, blank=True)
    performer3 = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Patch_Table'
class Receiverfidinfo(models.Model):
    """Unmanaged mapping of the legacy 'ReceiveRfidInfo' tag-read table."""
    # inspectdb emitted ``id`` without primary_key=True; Django rejects a
    # non-primary-key field named ``id`` (system check models.E004), so the
    # column is marked as the primary key explicitly.
    id = models.IntegerField(primary_key=True)
    receivetime = models.DateTimeField(db_column='receiveTime', blank=True, null=True)  # Field name made lowercase.
    cardid = models.TextField(db_column='cardId', blank=True, null=True)  # Field name made lowercase.
    stationname = models.TextField(db_column='stationName', blank=True, null=True)  # Field name made lowercase.
    style = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'ReceiveRfidInfo'
class RfidalarmtestTable(models.Model):
    """Unmanaged mapping of the legacy 'RfidAlarmTest_Table' alarm-test table."""
    rfid_number = models.IntegerField(null=True, blank=True)
    rfid = models.TextField(null=True, blank=True)
    time = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'RfidAlarmTest_Table'
class RfidinfoTable(models.Model):
    """Unmanaged mapping of the legacy 'RfidInfo_Table' reader-config table."""
    rfid_id = models.IntegerField(null=True, blank=True)
    rfid_name = models.TextField(null=True, blank=True)
    rfid_describe = models.TextField(null=True, blank=True)
    rfid_serverip = models.TextField(db_column='rfid_serverIp', null=True, blank=True)
    rfid_serverport = models.TextField(db_column='rfid_serverPort', null=True, blank=True)
    rfid_clientip = models.TextField(db_column='rfid_clientIp', null=True, blank=True)
    rfid_clientport = models.TextField(db_column='rfid_clientPort', null=True, blank=True)
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.TextField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'RfidInfo_Table'
class RfidtestTable1(models.Model):
    """Unmanaged mapping of the legacy 'RfidTest_Table1' test table."""
    rfid_number = models.IntegerField(null=True, blank=True)
    rfid = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'RfidTest_Table1'


class RfidtestTable2(models.Model):
    """Unmanaged mapping of the legacy 'RfidTest_Table2' test table."""
    rfid_number = models.IntegerField(null=True, blank=True)
    rfid = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'RfidTest_Table2'


class RfidtestTable3(models.Model):
    """Unmanaged mapping of the legacy 'RfidTest_Table3' test table."""
    rfid_number = models.IntegerField(null=True, blank=True)
    rfid = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'RfidTest_Table3'
class RolepermisTable(models.Model):
    """Unmanaged mapping of the legacy 'RolePermis_Table' role-permission table."""
    per_id = models.IntegerField(null=True, blank=True)
    role_id = models.IntegerField(null=True, blank=True)
    menu_id = models.IntegerField(null=True, blank=True)
    access_str = models.TextField(null=True, blank=True)
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.DateTimeField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'RolePermis_Table'
class RoleTable(models.Model):
    """Unmanaged mapping of the legacy 'Role_Table' role table."""
    role_id = models.IntegerField(null=True, blank=True)
    name = models.TextField(null=True, blank=True)
    node = models.TextField(null=True, blank=True)
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.DateTimeField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Role_Table'
class SellercatTable(models.Model):
    """Unmanaged mapping of the legacy 'SellerCat_Table' category table."""
    # inspectdb emitted ``id`` without primary_key=True; Django rejects a
    # non-primary-key field named ``id`` (system check models.E004), so the
    # column is marked as the primary key explicitly.
    id = models.IntegerField(primary_key=True)
    cat_name = models.CharField(max_length=50, blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'SellerCat_Table'
class ShenpiTable(models.Model):
    """Unmanaged mapping of the legacy 'ShenPi_Table' approval table."""
    # inspectdb emitted ``id`` without primary_key=True; Django rejects a
    # non-primary-key field named ``id`` (system check models.E004), so the
    # column is marked as the primary key explicitly.
    id = models.IntegerField(primary_key=True)
    suoyin = models.IntegerField(blank=True, null=True)
    user_id = models.IntegerField(blank=True, null=True)
    advise = models.TextField(blank=True, null=True)
    step = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'ShenPi_Table'
class StockinTable(models.Model):
    """Unmanaged mapping of the legacy 'StockIn_Table' stock-in table."""
    in_id = models.IntegerField(null=True, blank=True)
    stockin_id = models.IntegerField(db_column='stockIn_id', null=True, blank=True)
    goodsid = models.IntegerField(db_column='goodsId', null=True, blank=True)
    goodssn = models.TextField(db_column='goodsSn', null=True, blank=True)
    catid = models.IntegerField(db_column='catId', null=True, blank=True)
    inamount = models.IntegerField(db_column='inAmount', null=True, blank=True)
    cost = models.TextField(null=True, blank=True)
    performer = models.IntegerField(null=True, blank=True)
    in_time = models.TextField(null=True, blank=True)
    shopid = models.IntegerField(db_column='shopId', null=True, blank=True)
    manufactorid = models.IntegerField(db_column='manufactorId', null=True, blank=True)
    supplierid = models.IntegerField(db_column='supplierId', null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'StockIn_Table'
class StockoutTable(models.Model):
    """Unmanaged mapping of the legacy 'StockOut_Table' stock-out table."""
    out_id = models.IntegerField(null=True, blank=True)
    stockout_id = models.IntegerField(db_column='stockOut_id', null=True, blank=True)
    goodsid = models.IntegerField(db_column='goodsId', null=True, blank=True)
    catid = models.IntegerField(db_column='catId', null=True, blank=True)
    goodssn = models.TextField(db_column='goodsSn', null=True, blank=True)
    outamount = models.IntegerField(db_column='outAmount', null=True, blank=True)
    price = models.TextField(null=True, blank=True)
    performer = models.IntegerField(null=True, blank=True)
    out_time = models.TextField(null=True, blank=True)
    shopid = models.IntegerField(db_column='shopId', null=True, blank=True)
    khphone = models.CharField(db_column='KHPhone', null=True, blank=True, max_length=50)
    kehuname = models.TextField(db_column='KeHuName', null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'StockOut_Table'
class SupplierTable(models.Model):
    """Unmanaged mapping of the legacy 'Supplier_Table' supplier table."""
    supplierid = models.IntegerField(db_column='supplierId', null=True, blank=True)
    name = models.TextField(null=True, blank=True)
    managername = models.TextField(db_column='managerName', null=True, blank=True)
    phone = models.CharField(null=True, blank=True, max_length=50)
    address = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Supplier_Table'
class Text10Table(models.Model):
    """Unmanaged mapping of the legacy 'Text10_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text10_Table'


class Text11Table(models.Model):
    """Unmanaged mapping of the legacy 'Text11_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text11_Table'


class Text14Table(models.Model):
    """Unmanaged mapping of the legacy 'Text14_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text14_Table'


class Text15Table(models.Model):
    """Unmanaged mapping of the legacy 'Text15_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text15_Table'


class Text16Table(models.Model):
    """Unmanaged mapping of the legacy 'Text16_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text16_Table'


class Text17Table(models.Model):
    """Unmanaged mapping of the legacy 'Text17_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text17_Table'


class Text1Table(models.Model):
    """Unmanaged mapping of the legacy 'Text1_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text1_Table'


class Text2Table(models.Model):
    """Unmanaged mapping of the legacy 'Text2_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text2_Table'


class Text3Table(models.Model):
    """Unmanaged mapping of the legacy 'Text3_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text3_Table'


class Text4Table(models.Model):
    """Unmanaged mapping of the legacy 'Text4_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text4_Table'


class Text5Table(models.Model):
    """Unmanaged mapping of the legacy 'Text5_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text5_Table'


class Text6Table(models.Model):
    """Unmanaged mapping of the legacy 'Text6_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text6_Table'


class Text7Table(models.Model):
    """Unmanaged mapping of the legacy 'Text7_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text7_Table'


class Text8Table(models.Model):
    """Unmanaged mapping of the legacy 'Text8_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text8_Table'


class Text9Table(models.Model):
    """Unmanaged mapping of the legacy 'Text9_Table' free-text table."""
    text_id = models.IntegerField(null=True, blank=True)
    text_name = models.TextField(null=True, blank=True)
    text_str = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Text9_Table'
class TreeTable(models.Model):
    """Unmanaged mapping of the legacy 'Tree_Table' tree-definition table."""
    tree_id = models.IntegerField(null=True, blank=True)
    name = models.TextField(null=True, blank=True)
    maxnode = models.TextField(null=True, blank=True)
    remark = models.TextField(null=True, blank=True)
    info_id = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Tree_Table'
class Useroptrecord(models.Model):
    """Unmanaged mapping of the legacy 'UserOptRecord' audit-log table."""
    # inspectdb emitted ``id`` without primary_key=True; Django rejects a
    # non-primary-key field named ``id`` (system check models.E004), so the
    # column is marked as the primary key explicitly.
    id = models.IntegerField(primary_key=True)
    uid = models.IntegerField(blank=True, null=True)
    opt = models.CharField(max_length=50, blank=True, null=True)
    opttime = models.DateTimeField(blank=True, null=True)
    remark = models.CharField(max_length=50, blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'UserOptRecord'
class UserpermisTable(models.Model):
    """Unmanaged mapping of the legacy 'UserPermis_Table' user-permission table."""
    per_id = models.IntegerField(null=True, blank=True)
    user_id = models.IntegerField(null=True, blank=True)
    menu_id = models.IntegerField(null=True, blank=True)
    access_str = models.TextField(null=True, blank=True)
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.DateTimeField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'UserPermis_Table'
class UserroleTable(models.Model):
    """Unmanaged mapping of the legacy 'UserRole_Table' user/role join table."""
    ur_id = models.IntegerField(null=True, blank=True)
    user_id = models.IntegerField(null=True, blank=True)
    role_id = models.IntegerField(null=True, blank=True)
    create_time = models.DateTimeField(null=True, blank=True)
    create_uid = models.IntegerField(null=True, blank=True)
    update_time = models.DateTimeField(null=True, blank=True)
    update_uid = models.IntegerField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'UserRole_Table'
class UserTable(models.Model):
    """Unmanaged mapping of the legacy 'User_Table' account table."""
    # NOTE(review): the table exposes both ``user_id`` and ``id``; which is the
    # real key cannot be determined from here — confirm against the schema.
    user_id = models.IntegerField(blank=True, null=True)
    name = models.TextField(blank=True, null=True)
    pwd = models.TextField(blank=True, null=True)
    create_time = models.DateTimeField(blank=True, null=True)
    create_uid = models.IntegerField(blank=True, null=True)
    update_time = models.DateTimeField(blank=True, null=True)
    update_uid = models.IntegerField(blank=True, null=True)
    zgaccess_str = models.TextField(db_column='ZGaccess_str', blank=True, null=True)  # Field name made lowercase.
    value2 = models.TextField(blank=True, null=True)
    # inspectdb emitted ``id`` without primary_key=True; Django rejects a
    # non-primary-key field named ``id`` (system check models.E004), so the
    # column is marked as the primary key explicitly.
    id = models.IntegerField(primary_key=True)

    class Meta:
        managed = False
        db_table = 'User_Table'
class Value10Table(models.Model):
    """Unmanaged mapping of the legacy 'Value10_Table' generic 50-column table."""
    value_id = models.IntegerField(null=True, blank=True)
    value1 = models.TextField(null=True, blank=True)
    value2 = models.TextField(null=True, blank=True)
    value3 = models.TextField(null=True, blank=True)
    value4 = models.TextField(null=True, blank=True)
    value5 = models.TextField(null=True, blank=True)
    value6 = models.TextField(null=True, blank=True)
    value7 = models.TextField(null=True, blank=True)
    value8 = models.TextField(null=True, blank=True)
    value9 = models.TextField(null=True, blank=True)
    value10 = models.TextField(null=True, blank=True)
    value11 = models.TextField(null=True, blank=True)
    value12 = models.TextField(null=True, blank=True)
    value13 = models.TextField(null=True, blank=True)
    value14 = models.TextField(null=True, blank=True)
    value15 = models.TextField(null=True, blank=True)
    value16 = models.TextField(null=True, blank=True)
    value17 = models.TextField(null=True, blank=True)
    value18 = models.TextField(null=True, blank=True)
    value19 = models.TextField(null=True, blank=True)
    value20 = models.TextField(null=True, blank=True)
    value21 = models.TextField(null=True, blank=True)
    value22 = models.TextField(null=True, blank=True)
    value23 = models.TextField(null=True, blank=True)
    value24 = models.TextField(null=True, blank=True)
    value25 = models.TextField(null=True, blank=True)
    value26 = models.TextField(null=True, blank=True)
    value27 = models.TextField(null=True, blank=True)
    value28 = models.TextField(null=True, blank=True)
    value29 = models.TextField(null=True, blank=True)
    value30 = models.TextField(null=True, blank=True)
    value31 = models.TextField(null=True, blank=True)
    value32 = models.TextField(null=True, blank=True)
    value33 = models.TextField(null=True, blank=True)
    value34 = models.TextField(null=True, blank=True)
    value35 = models.TextField(null=True, blank=True)
    value36 = models.TextField(null=True, blank=True)
    value37 = models.TextField(null=True, blank=True)
    value38 = models.TextField(null=True, blank=True)
    value39 = models.TextField(null=True, blank=True)
    value40 = models.TextField(null=True, blank=True)
    value41 = models.TextField(null=True, blank=True)
    value42 = models.TextField(null=True, blank=True)
    value43 = models.TextField(null=True, blank=True)
    value44 = models.TextField(null=True, blank=True)
    value45 = models.TextField(null=True, blank=True)
    value46 = models.TextField(null=True, blank=True)
    value47 = models.TextField(null=True, blank=True)
    value48 = models.TextField(null=True, blank=True)
    value49 = models.TextField(null=True, blank=True)
    value50 = models.TextField(null=True, blank=True)

    class Meta:
        managed = False
        db_table = 'Value10_Table'
class Value11Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value11_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus 50 generic, nullable free-text columns.

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Fifty generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value11_Table'
class Value14Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value14_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus 50 generic, nullable free-text columns.

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Fifty generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value14_Table'
class Value15Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value15_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus 50 generic, nullable free-text columns.

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Fifty generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value15_Table'
class Value16Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value16_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus 50 generic, nullable free-text columns.

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Fifty generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value16_Table'
class Value17Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value17_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus 50 generic, nullable free-text columns.

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Fifty generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value17_Table'
class Value1Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value1_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus 50 generic, nullable free-text columns.

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Fifty generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value1_Table'
class Value22Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value22_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus nine generic, nullable free-text columns (unlike the
    50-column siblings in this module).

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Nine generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value22_Table'
class Value23Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value23_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus nine generic, nullable free-text columns (unlike the
    50-column siblings in this module).

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Nine generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value23_Table'
class Value24Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value24_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus nine generic, nullable free-text columns (unlike the
    50-column siblings in this module).

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Nine generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value24_Table'
class Value26Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value26_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus nine generic, nullable free-text columns (unlike the
    50-column siblings in this module).

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Nine generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value26_Table'
class Value2Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value2_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus 50 generic, nullable free-text columns.

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Fifty generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value2_Table'
class Value34Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value34_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus 50 generic, nullable free-text columns.

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Fifty generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value34_Table'
class Value3Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value3_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus 50 generic, nullable free-text columns.

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Fifty generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value3_Table'
class Value4Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value4_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus 50 generic, nullable free-text columns.

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Fifty generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value4_Table'
class Value5Table(models.Model):
    """Unmanaged model mapped onto the pre-existing ``Value5_Table`` table.

    Appears auto-generated (``inspectdb``-style): a nullable integer row
    identifier plus 50 generic, nullable free-text columns.

    NOTE(review): no field declares ``primary_key=True``, so Django will
    assume the table has an auto-incrementing ``id`` column -- confirm
    against the actual schema.
    """
    # Row identifier; nullable, and NOT declared as the primary key.
    value_id = models.IntegerField(blank=True, null=True)
    # Fifty generic text payload columns; each is optional
    # (blank in forms, NULL in the database).
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Table is owned externally; Django performs no migrations on it.
        managed = False
        db_table = 'Value5_Table'
class Value6Table(models.Model):
    """Unmanaged model over the legacy 'Value6_Table' database table.

    Auto-generated (inspectdb-style) wide table: one integer value_id plus
    fifty generic nullable text columns value1..value50.
    """

    # NOTE(review): no field sets primary_key=True, so Django adds an implicit
    # `id` AutoField and will query an `id` column — confirm the table has one
    # (or mark value_id as the primary key if it is unique and non-null).
    value_id = models.IntegerField(blank=True, null=True)
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Unmanaged: Django never creates, migrates, or drops this table.
        managed = False
        db_table = 'Value6_Table'
class Value7Table(models.Model):
    """Unmanaged model over the legacy 'Value7_Table' database table.

    Auto-generated (inspectdb-style) wide table: one integer value_id plus
    fifty generic nullable text columns value1..value50.
    """

    # NOTE(review): no field sets primary_key=True, so Django adds an implicit
    # `id` AutoField and will query an `id` column — confirm the table has one
    # (or mark value_id as the primary key if it is unique and non-null).
    value_id = models.IntegerField(blank=True, null=True)
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Unmanaged: Django never creates, migrates, or drops this table.
        managed = False
        db_table = 'Value7_Table'
class Value8Table(models.Model):
    """Unmanaged model over the legacy 'Value8_Table' database table.

    Auto-generated (inspectdb-style) wide table: one integer value_id plus
    fifty generic nullable text columns value1..value50.
    """

    # NOTE(review): no field sets primary_key=True, so Django adds an implicit
    # `id` AutoField and will query an `id` column — confirm the table has one
    # (or mark value_id as the primary key if it is unique and non-null).
    value_id = models.IntegerField(blank=True, null=True)
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Unmanaged: Django never creates, migrates, or drops this table.
        managed = False
        db_table = 'Value8_Table'
class Value9Table(models.Model):
    """Unmanaged model over the legacy 'Value9_Table' database table.

    Auto-generated (inspectdb-style) wide table: one integer value_id plus
    fifty generic nullable text columns value1..value50.
    """

    # NOTE(review): no field sets primary_key=True, so Django adds an implicit
    # `id` AutoField and will query an `id` column — confirm the table has one
    # (or mark value_id as the primary key if it is unique and non-null).
    value_id = models.IntegerField(blank=True, null=True)
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Unmanaged: Django never creates, migrates, or drops this table.
        managed = False
        db_table = 'Value9_Table'
class ValueTable20151224(models.Model):
    """Unmanaged model over the date-stamped legacy table 'Value_Table_2015_12_24'.

    One of a family of per-date snapshot tables sharing the same shape:
    an integer value_id plus fifty generic nullable text columns
    value1..value50.
    """

    # NOTE(review): no field sets primary_key=True, so Django adds an implicit
    # `id` AutoField and will query an `id` column — confirm the table has one
    # (or mark value_id as the primary key if it is unique and non-null).
    value_id = models.IntegerField(blank=True, null=True)
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Unmanaged: Django never creates, migrates, or drops this table.
        managed = False
        db_table = 'Value_Table_2015_12_24'
class ValueTable20151228(models.Model):
    """Unmanaged model over the date-stamped legacy table 'Value_Table_2015_12_28'.

    One of a family of per-date snapshot tables sharing the same shape:
    an integer value_id plus fifty generic nullable text columns
    value1..value50.
    """

    # NOTE(review): no field sets primary_key=True, so Django adds an implicit
    # `id` AutoField and will query an `id` column — confirm the table has one
    # (or mark value_id as the primary key if it is unique and non-null).
    value_id = models.IntegerField(blank=True, null=True)
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Unmanaged: Django never creates, migrates, or drops this table.
        managed = False
        db_table = 'Value_Table_2015_12_28'
class ValueTable20151229(models.Model):
    """Unmanaged model over the date-stamped legacy table 'Value_Table_2015_12_29'.

    One of a family of per-date snapshot tables sharing the same shape:
    an integer value_id plus fifty generic nullable text columns
    value1..value50.
    """

    # NOTE(review): no field sets primary_key=True, so Django adds an implicit
    # `id` AutoField and will query an `id` column — confirm the table has one
    # (or mark value_id as the primary key if it is unique and non-null).
    value_id = models.IntegerField(blank=True, null=True)
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Unmanaged: Django never creates, migrates, or drops this table.
        managed = False
        db_table = 'Value_Table_2015_12_29'
class ValueTable20151230(models.Model):
    """Unmanaged model over the date-stamped legacy table 'Value_Table_2015_12_30'.

    One of a family of per-date snapshot tables sharing the same shape:
    an integer value_id plus fifty generic nullable text columns
    value1..value50.
    """

    # NOTE(review): no field sets primary_key=True, so Django adds an implicit
    # `id` AutoField and will query an `id` column — confirm the table has one
    # (or mark value_id as the primary key if it is unique and non-null).
    value_id = models.IntegerField(blank=True, null=True)
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Unmanaged: Django never creates, migrates, or drops this table.
        managed = False
        db_table = 'Value_Table_2015_12_30'
class ValueTable20151231(models.Model):
    """Unmanaged model over the date-stamped legacy table 'Value_Table_2015_12_31'.

    One of a family of per-date snapshot tables sharing the same shape:
    an integer value_id plus fifty generic nullable text columns
    value1..value50.
    """

    # NOTE(review): no field sets primary_key=True, so Django adds an implicit
    # `id` AutoField and will query an `id` column — confirm the table has one
    # (or mark value_id as the primary key if it is unique and non-null).
    value_id = models.IntegerField(blank=True, null=True)
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Unmanaged: Django never creates, migrates, or drops this table.
        managed = False
        db_table = 'Value_Table_2015_12_31'
class ValueTable20160120(models.Model):
    """Unmanaged model over the date-stamped legacy table 'Value_Table_2016_01_20'.

    One of a family of per-date snapshot tables sharing the same shape:
    an integer value_id plus fifty generic nullable text columns
    value1..value50.
    """

    # NOTE(review): no field sets primary_key=True, so Django adds an implicit
    # `id` AutoField and will query an `id` column — confirm the table has one
    # (or mark value_id as the primary key if it is unique and non-null).
    value_id = models.IntegerField(blank=True, null=True)
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Unmanaged: Django never creates, migrates, or drops this table.
        managed = False
        db_table = 'Value_Table_2016_01_20'
class ValueTable20160301(models.Model):
    """Unmanaged model over the date-stamped legacy table 'Value_Table_2016_03_01'.

    One of a family of per-date snapshot tables sharing the same shape:
    an integer value_id plus fifty generic nullable text columns
    value1..value50.
    """

    # NOTE(review): no field sets primary_key=True, so Django adds an implicit
    # `id` AutoField and will query an `id` column — confirm the table has one
    # (or mark value_id as the primary key if it is unique and non-null).
    value_id = models.IntegerField(blank=True, null=True)
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Unmanaged: Django never creates, migrates, or drops this table.
        managed = False
        db_table = 'Value_Table_2016_03_01'
class ValueTable20160303(models.Model):
    """Unmanaged model over the date-stamped legacy table 'Value_Table_2016_03_03'.

    One of a family of per-date snapshot tables sharing the same shape:
    an integer value_id plus fifty generic nullable text columns
    value1..value50.
    """

    # NOTE(review): no field sets primary_key=True, so Django adds an implicit
    # `id` AutoField and will query an `id` column — confirm the table has one
    # (or mark value_id as the primary key if it is unique and non-null).
    value_id = models.IntegerField(blank=True, null=True)
    value1 = models.TextField(blank=True, null=True)
    value2 = models.TextField(blank=True, null=True)
    value3 = models.TextField(blank=True, null=True)
    value4 = models.TextField(blank=True, null=True)
    value5 = models.TextField(blank=True, null=True)
    value6 = models.TextField(blank=True, null=True)
    value7 = models.TextField(blank=True, null=True)
    value8 = models.TextField(blank=True, null=True)
    value9 = models.TextField(blank=True, null=True)
    value10 = models.TextField(blank=True, null=True)
    value11 = models.TextField(blank=True, null=True)
    value12 = models.TextField(blank=True, null=True)
    value13 = models.TextField(blank=True, null=True)
    value14 = models.TextField(blank=True, null=True)
    value15 = models.TextField(blank=True, null=True)
    value16 = models.TextField(blank=True, null=True)
    value17 = models.TextField(blank=True, null=True)
    value18 = models.TextField(blank=True, null=True)
    value19 = models.TextField(blank=True, null=True)
    value20 = models.TextField(blank=True, null=True)
    value21 = models.TextField(blank=True, null=True)
    value22 = models.TextField(blank=True, null=True)
    value23 = models.TextField(blank=True, null=True)
    value24 = models.TextField(blank=True, null=True)
    value25 = models.TextField(blank=True, null=True)
    value26 = models.TextField(blank=True, null=True)
    value27 = models.TextField(blank=True, null=True)
    value28 = models.TextField(blank=True, null=True)
    value29 = models.TextField(blank=True, null=True)
    value30 = models.TextField(blank=True, null=True)
    value31 = models.TextField(blank=True, null=True)
    value32 = models.TextField(blank=True, null=True)
    value33 = models.TextField(blank=True, null=True)
    value34 = models.TextField(blank=True, null=True)
    value35 = models.TextField(blank=True, null=True)
    value36 = models.TextField(blank=True, null=True)
    value37 = models.TextField(blank=True, null=True)
    value38 = models.TextField(blank=True, null=True)
    value39 = models.TextField(blank=True, null=True)
    value40 = models.TextField(blank=True, null=True)
    value41 = models.TextField(blank=True, null=True)
    value42 = models.TextField(blank=True, null=True)
    value43 = models.TextField(blank=True, null=True)
    value44 = models.TextField(blank=True, null=True)
    value45 = models.TextField(blank=True, null=True)
    value46 = models.TextField(blank=True, null=True)
    value47 = models.TextField(blank=True, null=True)
    value48 = models.TextField(blank=True, null=True)
    value49 = models.TextField(blank=True, null=True)
    value50 = models.TextField(blank=True, null=True)

    class Meta:
        # Unmanaged: Django never creates, migrates, or drops this table.
        managed = False
        db_table = 'Value_Table_2016_03_03'
class VideoinfoTable(models.Model):
    """Unmanaged mapping of the legacy ``VideoInfo_Table``: connection/login
    details for a video source plus audit columns.

    NOTE(review): no field is marked ``primary_key=True``, so Django will add
    an automatic ``id`` column that the real table may not have — verify
    against the database schema.
    """

    video_id = models.IntegerField(blank=True, null=True)
    video_name = models.TextField(blank=True, null=True)
    video_describe = models.TextField(blank=True, null=True)
    video_loginip = models.TextField(db_column='video_loginIp', blank=True, null=True)  # Field name made lowercase.
    video_loginport = models.TextField(db_column='video_loginPort', blank=True, null=True)  # Field name made lowercase.
    video_loginname = models.TextField(db_column='video_loginName', blank=True, null=True)  # Field name made lowercase.
    video_loginpwd = models.TextField(db_column='video_loginPwd', blank=True, null=True)  # Field name made lowercase.
    video_channelnum = models.IntegerField(db_column='video_channelNum', blank=True, null=True)  # Field name made lowercase.
    video_streamtype = models.IntegerField(db_column='video_streamType', blank=True, null=True)  # Field name made lowercase.
    video_linkmode = models.IntegerField(db_column='video_linkMode', blank=True, null=True)  # Field name made lowercase.
    video_speed = models.IntegerField(blank=True, null=True)
    # Timestamps stored as text in the legacy schema — presumably serialized
    # datetimes; confirm the format before parsing.
    create_time = models.TextField(blank=True, null=True)
    create_uid = models.IntegerField(blank=True, null=True)
    update_time = models.TextField(blank=True, null=True)
    update_uid = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False  # table lifecycle is handled outside Django migrations
        db_table = 'VideoInfo_Table'
class ZidianTable(models.Model):
    """Unmanaged mapping of ``ZiDian_Table`` ("zidian" = dictionary/lookup
    table — presumably key/value reference data; confirm with the schema).

    NOTE(review): ``id`` here is a plain nullable IntegerField, not the
    primary key, so Django will still add its own auto ``id`` — this inspectdb
    artifact usually needs ``primary_key=True`` on the real key column.
    """

    id = models.IntegerField(blank=True, null=True)
    name = models.TextField(blank=True, null=True)
    describe = models.TextField(blank=True, null=True)
    mokuai_id = models.IntegerField(blank=True, null=True)  # "mokuai" = module; presumably a module FK
    create_time = models.DateTimeField(blank=True, null=True)
    create_uid = models.IntegerField(blank=True, null=True)
    update_time = models.DateTimeField(blank=True, null=True)
    update_uid = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False  # table lifecycle is handled outside Django migrations
        db_table = 'ZiDian_Table'
class AuthGroup(models.Model):
    """Unmanaged mirror of Django's built-in ``auth_group`` table."""

    name = models.CharField(unique=True, max_length=80)

    class Meta:
        managed = False  # owned by django.contrib.auth, not these models
        db_table = 'auth_group'
class AuthGroupPermissions(models.Model):
    """Unmanaged mirror of the group<->permission M2M join table."""

    group = models.ForeignKey(AuthGroup, models.DO_NOTHING)
    # String reference because AuthPermission is defined later in the module.
    permission = models.ForeignKey('AuthPermission', models.DO_NOTHING)

    class Meta:
        managed = False  # owned by django.contrib.auth
        db_table = 'auth_group_permissions'
        unique_together = (('group', 'permission'),)
class AuthPermission(models.Model):
    """Unmanaged mirror of Django's ``auth_permission`` table."""

    name = models.CharField(max_length=255)
    content_type = models.ForeignKey('DjangoContentType', models.DO_NOTHING)
    codename = models.CharField(max_length=100)

    class Meta:
        managed = False  # owned by django.contrib.auth
        db_table = 'auth_permission'
        unique_together = (('content_type', 'codename'),)
class AuthUser(models.Model):
    """Unmanaged mirror of Django's ``auth_user`` table.

    Boolean columns surface as IntegerField because the backing database
    stores them as integers (typical of inspectdb output).
    """

    password = models.CharField(max_length=128)
    last_login = models.DateTimeField(blank=True, null=True)
    is_superuser = models.IntegerField()  # 0/1 boolean in the database
    username = models.CharField(unique=True, max_length=150)
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=150)
    email = models.CharField(max_length=254)
    is_staff = models.IntegerField()  # 0/1 boolean in the database
    is_active = models.IntegerField()  # 0/1 boolean in the database
    date_joined = models.DateTimeField()

    class Meta:
        managed = False  # owned by django.contrib.auth
        db_table = 'auth_user'
class AuthUserGroups(models.Model):
    """Unmanaged mirror of the user<->group M2M join table."""

    user = models.ForeignKey(AuthUser, models.DO_NOTHING)
    group = models.ForeignKey(AuthGroup, models.DO_NOTHING)

    class Meta:
        managed = False  # owned by django.contrib.auth
        db_table = 'auth_user_groups'
        unique_together = (('user', 'group'),)
class AuthUserUserPermissions(models.Model):
    """Unmanaged mirror of the user<->permission M2M join table."""

    user = models.ForeignKey(AuthUser, models.DO_NOTHING)
    permission = models.ForeignKey(AuthPermission, models.DO_NOTHING)

    class Meta:
        managed = False  # owned by django.contrib.auth
        db_table = 'auth_user_user_permissions'
        unique_together = (('user', 'permission'),)
class DjangoAdminLog(models.Model):
    """Unmanaged mirror of the admin site's change-history table."""

    action_time = models.DateTimeField()
    object_id = models.TextField(blank=True, null=True)
    object_repr = models.CharField(max_length=200)
    action_flag = models.PositiveSmallIntegerField()  # add/change/delete code
    change_message = models.TextField()
    content_type = models.ForeignKey('DjangoContentType', models.DO_NOTHING, blank=True, null=True)
    user = models.ForeignKey(AuthUser, models.DO_NOTHING)

    class Meta:
        managed = False  # owned by django.contrib.admin
        db_table = 'django_admin_log'
class DjangoContentType(models.Model):
    """Unmanaged mirror of Django's content-types registry table."""

    app_label = models.CharField(max_length=100)
    model = models.CharField(max_length=100)

    class Meta:
        managed = False  # owned by django.contrib.contenttypes
        db_table = 'django_content_type'
        unique_together = (('app_label', 'model'),)
class DjangoMigrations(models.Model):
    """Unmanaged mirror of Django's applied-migrations bookkeeping table."""

    app = models.CharField(max_length=255)
    name = models.CharField(max_length=255)
    applied = models.DateTimeField()

    class Meta:
        managed = False  # owned by the Django migration framework
        db_table = 'django_migrations'
class DjangoSession(models.Model):
    """Unmanaged mirror of Django's database-backed session store."""

    session_key = models.CharField(primary_key=True, max_length=40)
    session_data = models.TextField()  # encoded/signed session payload
    expire_date = models.DateTimeField()

    class Meta:
        managed = False  # owned by django.contrib.sessions
        db_table = 'django_session'
class GoodscatTable(models.Model):
    """Unmanaged mapping of ``goodsCat_Table`` (goods categories)."""

    catid = models.IntegerField(db_column='catId', blank=True, null=True)  # Field name made lowercase.
    catname = models.TextField(db_column='catName', blank=True, null=True)  # Field name made lowercase.

    class Meta:
        managed = False  # table lifecycle is handled outside Django migrations
        db_table = 'goodsCat_Table'
class ShanghuTable(models.Model):
    """Unmanaged mapping of ``shanghu_Table`` ("shanghu" = merchant):
    merchant/seller records with contact and premises details."""

    sellerid = models.IntegerField(db_column='sellerId', blank=True, null=True)  # Field name made lowercase.
    name = models.TextField(blank=True, null=True)
    # NOTE(review): shadows the ``type`` builtin at class scope — harmless for
    # a model field but mirrors the legacy column name.
    type = models.IntegerField(blank=True, null=True)
    managername = models.TextField(db_column='managerName', blank=True, null=True)  # Field name made lowercase.
    phone = models.CharField(max_length=50, blank=True, null=True)
    floor = models.IntegerField(blank=True, null=True)
    addressnumber = models.IntegerField(blank=True, null=True)
    area = models.FloatField(blank=True, null=True)  # presumably floor area; unit unconfirmed

    class Meta:
        managed = False  # table lifecycle is handled outside Django migrations
        db_table = 'shanghu_Table'
class Sysdiagrams(models.Model):
    """Unmanaged mapping of SQL Server's built-in ``sysdiagrams`` table
    (database diagram storage)."""

    name = models.CharField(max_length=128, blank=True, null=True)
    principal_id = models.IntegerField(blank=True, null=True)
    diagram_id = models.IntegerField(blank=True, null=True)
    version = models.IntegerField(blank=True, null=True)
    definition = models.TextField(blank=True, null=True)  # binary diagram blob exposed as text

    class Meta:
        managed = False  # owned by SQL Server tooling
        db_table = 'sysdiagrams'
| 45.513722 | 125 | 0.717186 | 20,571 | 159,207 | 5.509795 | 0.024744 | 0.202405 | 0.292363 | 0.382321 | 0.946242 | 0.941425 | 0.933405 | 0.922474 | 0.911427 | 0.899887 | 0 | 0.030152 | 0.15966 | 159,207 | 3,497 | 126 | 45.526737 | 0.817024 | 0.011494 | 0 | 0.866258 | 1 | 0 | 0.015052 | 0.001424 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.000322 | 0.000322 | 0 | 0.915243 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 12 |
ef6c28a7f4b8899f181e792f0257e952dd9b5246 | 1,067 | py | Python | bazel-bin/skip_thoughts/evaluate.runfiles/__main__/skip_thoughts/encoder_manager.py | xiaoyu-liu/deepPixel | 84b3da8e41095f06b618e0d9879c7be4f29727c9 | [
"Apache-2.0"
] | null | null | null | bazel-bin/skip_thoughts/evaluate.runfiles/__main__/skip_thoughts/encoder_manager.py | xiaoyu-liu/deepPixel | 84b3da8e41095f06b618e0d9879c7be4f29727c9 | [
"Apache-2.0"
] | null | null | null | bazel-bin/skip_thoughts/evaluate.runfiles/__main__/skip_thoughts/encoder_manager.py | xiaoyu-liu/deepPixel | 84b3da8e41095f06b618e0d9879c7be4f29727c9 | [
"Apache-2.0"
] | null | null | null | XSym
0058
08bd5ae1bb1b17c2c93e5ad1b2f3dc98
/Users/Yang/skip_thoughts/skip_thoughts/encoder_manager.py
| 213.4 | 965 | 0.086223 | 12 | 1,067 | 7.416667 | 0.833333 | 0.269663 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.204082 | 0.908154 | 1,067 | 5 | 965 | 213.4 | 0.704082 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
efb6545d1507e56914988e9a4212cfd1b847fc21 | 2,581 | py | Python | tests/backend/dep/test_core.py | daobook/hatch | 1cf39ad1a11ce90bc77fb7fdc4b9202433509179 | [
"MIT"
] | null | null | null | tests/backend/dep/test_core.py | daobook/hatch | 1cf39ad1a11ce90bc77fb7fdc4b9202433509179 | [
"MIT"
] | null | null | null | tests/backend/dep/test_core.py | daobook/hatch | 1cf39ad1a11ce90bc77fb7fdc4b9202433509179 | [
"MIT"
] | null | null | null | import sys
from packaging.requirements import Requirement
from hatch.venv.core import TempVirtualEnv
from hatchling.dep.core import dependencies_in_sync
def test_no_dependencies(platform):
    """An empty requirement list is trivially in sync."""
    with TempVirtualEnv(sys.executable, platform) as virtual_env:
        result = dependencies_in_sync([], virtual_env.sys_path)
        assert result
def test_dependency_not_found(platform):
    """A requirement for an uninstalled distribution is out of sync."""
    with TempVirtualEnv(sys.executable, platform) as virtual_env:
        requirements = [Requirement('binary')]
        assert not dependencies_in_sync(requirements, virtual_env.sys_path)
def test_dependency_found(platform):
    """Installing the distribution brings the requirement into sync."""
    with TempVirtualEnv(sys.executable, platform) as virtual_env:
        install_command = ['pip', 'install', 'binary']
        platform.run_command(install_command, check=True, capture_output=True)
        assert dependencies_in_sync([Requirement('binary')], virtual_env.sys_path)
def test_version_unmet(platform):
    """An installed distribution that misses the version specifier is out of sync."""
    with TempVirtualEnv(sys.executable, platform) as virtual_env:
        install_command = ['pip', 'install', 'binary']
        platform.run_command(install_command, check=True, capture_output=True)
        requirements = [Requirement('binary>9000')]
        assert not dependencies_in_sync(requirements, virtual_env.sys_path)
def test_marker_met(platform):
    """A requirement whose environment marker evaluates false is not required,
    so the (empty) environment is in sync."""
    with TempVirtualEnv(sys.executable, platform) as virtual_env:
        requirements = [Requirement('binary; python_version < "1"')]
        assert dependencies_in_sync(requirements, virtual_env.sys_path)
def test_marker_unmet(platform):
    """A requirement whose marker evaluates true is required, and the empty
    environment is therefore out of sync."""
    with TempVirtualEnv(sys.executable, platform) as virtual_env:
        requirements = [Requirement('binary; python_version > "1"')]
        assert not dependencies_in_sync(requirements, virtual_env.sys_path)
def test_extra_no_dependencies(platform):
    """Requesting an extra from a distribution that defines none is out of sync."""
    with TempVirtualEnv(sys.executable, platform) as virtual_env:
        install_command = ['pip', 'install', 'binary']
        platform.run_command(install_command, check=True, capture_output=True)
        requirements = [Requirement('binary[foo]')]
        assert not dependencies_in_sync(requirements, virtual_env.sys_path)
def test_unknown_extra(platform):
    """Requesting an extra the distribution does not define is out of sync,
    even when other extras are installed."""
    with TempVirtualEnv(sys.executable, platform) as virtual_env:
        install_command = ['pip', 'install', 'requests[security]==2.25.1']
        platform.run_command(install_command, check=True, capture_output=True)
        requirements = [Requirement('requests[foo]')]
        assert not dependencies_in_sync(requirements, virtual_env.sys_path)
def test_extra_unmet(platform):
    """A declared extra whose dependencies are not installed is out of sync."""
    with TempVirtualEnv(sys.executable, platform) as virtual_env:
        install_command = ['pip', 'install', 'requests==2.25.1']
        platform.run_command(install_command, check=True, capture_output=True)
        requirements = [Requirement('requests[security]==2.25.1')]
        assert not dependencies_in_sync(requirements, virtual_env.sys_path)
def test_extra_met(platform):
    """An extra whose dependencies are installed is in sync."""
    with TempVirtualEnv(sys.executable, platform) as virtual_env:
        install_command = ['pip', 'install', 'requests[security]==2.25.1']
        platform.run_command(install_command, check=True, capture_output=True)
        requirements = [Requirement('requests[security]==2.25.1')]
        assert dependencies_in_sync(requirements, virtual_env.sys_path)
| 40.968254 | 111 | 0.740411 | 331 | 2,581 | 5.567976 | 0.151057 | 0.083559 | 0.107434 | 0.157352 | 0.910472 | 0.910472 | 0.848074 | 0.83885 | 0.83885 | 0.827455 | 0 | 0.011633 | 0.134057 | 2,581 | 62 | 112 | 41.629032 | 0.812975 | 0 | 0 | 0.375 | 0 | 0 | 0.116621 | 0.040294 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.25 | false | 0 | 0.1 | 0 | 0.35 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4bec9517d920badacf2a439961d947c105534cb4 | 41 | py | Python | climatemaps/__init__.py | bartromgens/climatemaps | e5127d84c5b2d6a5aed1ee49a0ec87d3d00ce71b | [
"MIT"
] | 26 | 2016-05-04T02:33:53.000Z | 2022-03-06T18:05:25.000Z | climatemaps/__init__.py | bartromgens/climatemaps | e5127d84c5b2d6a5aed1ee49a0ec87d3d00ce71b | [
"MIT"
] | 13 | 2016-06-26T14:55:49.000Z | 2020-11-15T18:15:12.000Z | climatemaps/__init__.py | bartromgens/climatemaps | e5127d84c5b2d6a5aed1ee49a0ec87d3d00ce71b | [
"MIT"
] | 5 | 2017-03-28T23:51:25.000Z | 2021-02-09T08:59:25.000Z | from . import contour
from . import data
| 13.666667 | 21 | 0.756098 | 6 | 41 | 5.166667 | 0.666667 | 0.645161 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.195122 | 41 | 2 | 22 | 20.5 | 0.939394 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
323e2b637d137b96d25fda4494f78fcd1ad2df2e | 3,451 | py | Python | C3CTF/2019 36C3/numb_theory/divider.sage.py | PurpEth/solved-hacking-problem | 6f289d1647eb9c091caa580c7aae673e3ba02952 | [
"Unlicense"
] | 1 | 2021-08-24T22:16:41.000Z | 2021-08-24T22:16:41.000Z | C3CTF/2019 36C3/numb_theory/divider.sage.py | PurpEth/solved-hacking-problem | 6f289d1647eb9c091caa580c7aae673e3ba02952 | [
"Unlicense"
] | null | null | null | C3CTF/2019 36C3/numb_theory/divider.sage.py | PurpEth/solved-hacking-problem | 6f289d1647eb9c091caa580c7aae673e3ba02952 | [
"Unlicense"
] | null | null | null |
# This file was *autogenerated* from the file divider.sage
# CTF (36C3 "numb_theory") solution script, Python 2 (note the `L` long
# literals below). Divides two signatures in a quotient ring over Z/nZ.
from sage.all_cmdline import * # import sage library

# Integer constants hoisted by the Sage preparser.
_sage_const_76252947276074359115212190635202623074930398881978238358348687907322346550307537044869903273751300560270175469602969387751155949371321221078780453191705788294402166651660708380365520430838482255943461982865321352429408027042182590562536071019724523466166082700961321 = Integer(76252947276074359115212190635202623074930398881978238358348687907322346550307537044869903273751300560270175469602969387751155949371321221078780453191705788294402166651660708380365520430838482255943461982865321352429408027042182590562536071019724523466166082700961321); _sage_const_1 = Integer(1); _sage_const_7 = Integer(7); _sage_const_4 = Integer(4)
# The challenge modulus; all arithmetic below is performed modulo n.
n = _sage_const_76252947276074359115212190635202623074930398881978238358348687907322346550307537044869903273751300560270175469602969387751155949371321221078780453191705788294402166651660708380365520430838482255943461982865321352429408027042182590562536071019724523466166082700961321
# NOTE(review): n is declared a field even though it is presumably composite
# (RSA-style); this lets Sage invert elements mod n without factoring.
Z = IntegerModRing(n, is_field=True)
F = PolynomialRing(Z, names=('x',)); (x,) = F._first_ngens(1)
# Work in the quotient ring Q = Z[x] / (x^4 - 7); elements are 4-tuples of
# coefficients mod n.
Q = F.quotient(x**_sage_const_4 - _sage_const_7 , names=('y',)); (y,) = Q._first_ngens(1)

# Two captured signatures, given by their coefficient 4-tuples in Q.
sig1 = Q((8397631567215471411597859807632371799444140801738142960076493956917238686625544471258399738080022772196089434702459044585323637502779152788244106732365646228165621231157333590900159699739913678405112686118182575046274170945387086888426980856951768483383379744188677L, 21124328816590699486429742212972834492632397279764937079674194612250756262396655038977817156510059653643712997950409506330820976741162433180098496310685425936303336593882370211699155057896941674308145346440337055567854716378895103628517715334215543428053814207434382L, 38302107595073413770188759033039284745332726497947196718260659297015596461395433064468236639837496037400435085332440930223793480019017043415162109004805645876483722391356966864318187160465695013093504005882719887286775985445665985496095847403229210405133647022044231L, 47201466632662644946654328303416200366387247737869385658149914421830084769847399086993708235978420889002858132339622885436055787321436108476537276871951115787188503323194091834197417656150497142228754765234559119255670936453286414940407241550941585356800050461199269L))
sig2 = Q((25094355307479894556700746866365819356961238739391252053697743206549242694340054421226640011531221307690507281464327458948482616415138593846326354294801466677195272159947970487438909372267873139548856576705226756116055846486115922348540504348228587428136231383806807L, 26938778375892401180867601875053855815310826403231625240173213306165799791889925388259441621380140207312445923147030194995464102057863436611620243653716567639495124197528852236211387992213049232579046173199625014483782638570947635662799301198425019980638296596244081L, 57217719789418345029143341043134801296808015862656059986992617584133855032289955423151572591097601916603771892621206553753845338178112494473703027362803921053969798016535550329773792494468815863960856521783041570911293886831498620023230594926065319268319950173497631L, 25524998589118939795834009791259059660022115221576740461668045552145016624807544412060405016044167255913799546447668335736812432890025586131971064985274986627140887496358869119686667501551917286625217052191965798733269565498256562164106090879556584863513903273231823L))

# Divide sig2 by sig1 in Q and print the coefficient list of the quotient.
print((sig1**-_sage_const_1 * sig2).list())
410f744f0ae60ec82203371a97cc26b9ad72fa4a | 2,278 | py | Python | tools/conan/conans/test/runner_test.py | aversiveplusplus/aversiveplusplus | 5f5fe9faca50197fd6207e2c816efa7e9af6c804 | [
"BSD-3-Clause"
] | 29 | 2016-01-27T09:43:44.000Z | 2020-03-12T04:16:02.000Z | tools/conan/conans/test/runner_test.py | aversiveplusplus/aversiveplusplus | 5f5fe9faca50197fd6207e2c816efa7e9af6c804 | [
"BSD-3-Clause"
] | 20 | 2016-01-22T15:59:33.000Z | 2016-10-28T10:22:45.000Z | tools/conan/conans/test/runner_test.py | aversiveplusplus/aversiveplusplus | 5f5fe9faca50197fd6207e2c816efa7e9af6c804 | [
"BSD-3-Clause"
] | 6 | 2016-02-11T14:09:04.000Z | 2018-03-17T00:18:35.000Z | import unittest
from conans.test.tools import TestClient
import os
class RunnerTest(unittest.TestCase):
    """Tests for ConanRunner-backed ``self.run`` inside a recipe's ``build()``.

    The embedded conanfile template was previously triplicated with only the
    ``self.run(...)`` statement differing; it is now built by ``_conanfile``.
    """

    @staticmethod
    def _conanfile(run_statement):
        """Return a minimal conanfile source whose build() executes
        *run_statement* (a single Python statement as a string)."""
        return '''
from conans import ConanFile
from conans.client.runner import ConanRunner
import platform

class ConanFileToolsTest(ConanFile):

    def build(self):
        self._runner = ConanRunner()
        %s
''' % run_statement

    def basic_test(self):
        """self.run executes a command in the current folder."""
        conanfile = self._conanfile('self.run("mkdir test_folder")')
        files = {"conanfile.py": conanfile}
        client = TestClient()
        test_folder = os.path.join(client.current_folder, "test_folder")
        self.assertFalse(os.path.exists(test_folder))
        client.save(files)
        client.run("install")
        client.run("build")
        self.assertTrue(os.path.exists(test_folder))

    def cwd_test(self):
        """self.run honours the ``cwd`` argument for an existing folder."""
        conanfile = self._conanfile('self.run("mkdir test_folder", cwd="child_folder")')
        files = {"conanfile.py": conanfile}
        client = TestClient()
        os.makedirs(os.path.join(client.current_folder, "child_folder"))
        test_folder = os.path.join(client.current_folder, "child_folder", "test_folder")
        self.assertFalse(os.path.exists(test_folder))
        client.save(files)
        client.run("install")
        client.run("build")
        self.assertTrue(os.path.exists(test_folder))

    def cwd_error_test(self):
        """self.run fails cleanly when ``cwd`` points at a missing folder."""
        conanfile = self._conanfile('self.run("mkdir test_folder", cwd="non_existing_folder")')
        files = {"conanfile.py": conanfile}
        client = TestClient()
        test_folder = os.path.join(client.current_folder, "child_folder", "test_folder")
        self.assertFalse(os.path.exists(test_folder))
        client.save(files)
        client.run("install")
        error = client.run("build", ignore_error=True)
        self.assertTrue(error)
        self.assertIn("Error while executing 'mkdir test_folder'", client.user_io.out)
        self.assertFalse(os.path.exists(test_folder))
f5b295f128c3a9e47d069e66de977b84e1157000 | 10,308 | py | Python | concreteproperties/tests/test_reinforced_concrete_basics.py | robbievanleeuwen/concrete-properties | cf3b7932f87373f3cf3856b229ecb82eeee8146d | [
"MIT"
] | null | null | null | concreteproperties/tests/test_reinforced_concrete_basics.py | robbievanleeuwen/concrete-properties | cf3b7932f87373f3cf3856b229ecb82eeee8146d | [
"MIT"
] | null | null | null | concreteproperties/tests/test_reinforced_concrete_basics.py | robbievanleeuwen/concrete-properties | cf3b7932f87373f3cf3856b229ecb82eeee8146d | [
"MIT"
] | null | null | null | import pytest
from concreteproperties.material import Concrete, Steel
from concreteproperties.concrete_section import ConcreteSection
from concreteproperties.stress_strain_profile import (
WhitneyStressBlock,
SteelElasticPlastic,
)
from sectionproperties.pre.library.concrete_sections import (
concrete_rectangular_section,
concrete_tee_section,
)
from sectionproperties.pre.library.primitive_sections import circular_section_by_area
# All examples come from:
# Warner, R. F., Foster, S. J., & Kilpatrick, A. E. (2007). Reinforced Concrete Basics (1st ed.). Pearson Australia.
def test_example_3_1():
    """Warner et al. (2007) Example 3.1: gross and cracking properties of a
    300x450 rectangular beam with 3N24 bottom bars, 32 MPa concrete."""
    concrete = Concrete(
        name="32 MPa Concrete",
        elastic_modulus=30.1e3,
        density=2.4e-6,
        ultimate_stress_strain_profile=WhitneyStressBlock(
            alpha_2=0.85,
            gamma=0.83,
            compressive_strength=32,
            ultimate_strain=0.003,
        ),
        alpha_1=0.85,
        flexural_tensile_strength=3.4,
        residual_shrinkage_stress=2.4,
        colour="lightgrey",
    )

    steel = Steel(
        name="500 MPa Steel",
        elastic_modulus=200e3,
        density=7.85e-6,
        yield_strength=500,
        ultimate_stress_strain_profile=SteelElasticPlastic(
            yield_strength=500,
            elastic_modulus=200e3,
            fracture_strain=0.05,
        ),
        colour="grey",
    )

    # 300 wide x 450 deep, no top bars, 3 bottom bars of 450 mm^2 at 48 cover.
    geometry = concrete_rectangular_section(
        b=300,
        d=450,
        dia_top=24,
        n_top=0,
        dia_bot=24,
        n_bot=3,
        n_circle=4,
        cover=48,
        area_top=0,
        area_bot=450,
        conc_mat=concrete,
        steel_mat=steel,
    )

    conc_sec = ConcreteSection(geometry)
    props = conc_sec.get_transformed_gross_properties(elastic_modulus=30.1e3)
    m_c = conc_sec.calculate_cracking_moment()

    # Textbook answers: centroid 234 mm below top, Ixx = 2.47e9 mm^4,
    # cracking moment 11.4 kNm.
    assert pytest.approx(conc_sec.gross_properties.cy, abs=1) == 450 - 234
    assert pytest.approx(props.ixx_c, rel=0.01) == 2.47e9
    assert pytest.approx(m_c, rel=0.01) == 11.4e6

    # TODO: cracked neutral axis, cracking second moment of area, stresses
def test_example_3_2():
    """Warner et al. (2007) Example 3.2: gross and cracking properties of a
    tee beam (1000x120 flange, 300 web, 800 deep) with discrete bar rows.

    The three near-identical bar-placement loops are folded into a nested
    helper; the resulting geometry is unchanged.
    """
    concrete = Concrete(
        name="32 MPa Concrete",
        elastic_modulus=30.1e3,
        density=2.4e-6,
        ultimate_stress_strain_profile=WhitneyStressBlock(
            alpha_2=0.85,
            gamma=0.83,
            compressive_strength=32,
            ultimate_strain=0.003,
        ),
        alpha_1=0.85,
        flexural_tensile_strength=3.4,
        residual_shrinkage_stress=2.8,
        colour="lightgrey",
    )

    steel = Steel(
        name="500 MPa Steel",
        elastic_modulus=200e3,
        density=7.85e-6,
        yield_strength=500,
        ultimate_stress_strain_profile=SteelElasticPlastic(
            yield_strength=500,
            elastic_modulus=200e3,
            fracture_strain=0.05,
        ),
        colour="grey",
    )

    # Tee generated without bars (n_top = n_bot = 0); bars are added manually.
    geom = concrete_tee_section(
        b=300,
        d=800,
        b_f=1000,
        d_f=120,
        dia_top=20,
        n_top=0,
        dia_bot=28,
        n_bot=0,
        n_circle=4,
        cover=30,
        conc_mat=concrete,
        steel_mat=steel,
    ).shift_section(x_offset=350)

    def add_bar_row(geometry, area, n_bars, x_start, x_spacing, y_offset):
        """Cut-and-embed a row of n_bars circular bars into *geometry*."""
        for idx in range(n_bars):
            bar = circular_section_by_area(area=area, n=4, material=steel).shift_section(
                x_offset=x_start + idx * x_spacing, y_offset=y_offset
            )
            geometry = (geometry - bar) + bar
        return geometry

    # top bars: 7 x 310 mm^2 across the flange
    geom = add_bar_row(geom, 310, 7, 40, 920 / 6, 740)
    # bottom bars: two rows of 3 x 620 mm^2 in the web
    geom = add_bar_row(geom, 620, 3, 394, 212 / 2, 60)
    geom = add_bar_row(geom, 620, 3, 394, 212 / 2, 120)

    conc_sec = ConcreteSection(geom)
    props = conc_sec.get_transformed_gross_properties(elastic_modulus=30.1e3)
    m_c = conc_sec.calculate_cracking_moment()

    # Textbook answers: centroid 327 mm below top, Ixx = 24.1e9 mm^4,
    # cracking moment 30.6 kNm.
    assert pytest.approx(conc_sec.gross_properties.cy, abs=1) == 800 - 327
    assert pytest.approx(props.ixx_c, rel=0.01) == 24.1e9
    assert pytest.approx(m_c, rel=0.01) == 30.6e6

    # TODO: cracked neutral axis, cracking second moment of area, stresses
def test_example_3_4():
    """Warner et al. (2007) Example 3.4: tee beam with 7 x 800 mm^2 top bars
    and 3 x 620 mm^2 bottom bars.

    NOTE(review): this test currently only constructs the section — the
    cracked-section assertions are still TODO, so it exercises nothing.
    """
    concrete = Concrete(
        name="32 MPa Concrete",
        elastic_modulus=30.1e3,
        density=2.4e-6,
        ultimate_stress_strain_profile=WhitneyStressBlock(
            alpha_2=0.85,
            gamma=0.83,
            compressive_strength=32,
            ultimate_strain=0.003,
        ),
        alpha_1=0.85,
        flexural_tensile_strength=3.4,
        residual_shrinkage_stress=2.8,
        colour="lightgrey",
    )

    steel = Steel(
        name="500 MPa Steel",
        elastic_modulus=200e3,
        density=7.85e-6,
        yield_strength=500,
        ultimate_stress_strain_profile=SteelElasticPlastic(
            yield_strength=500,
            elastic_modulus=200e3,
            fracture_strain=0.05,
        ),
        colour="grey",
    )

    # Tee generated without bars; bars are embedded manually below.
    geom = concrete_tee_section(
        b=300,
        d=800,
        b_f=1000,
        d_f=120,
        dia_top=20,
        n_top=0,
        dia_bot=28,
        n_bot=0,
        n_circle=4,
        cover=30,
        conc_mat=concrete,
        steel_mat=steel,
    ).shift_section(x_offset=350)

    # top bars
    for idx in range(7):
        bar = circular_section_by_area(area=800, n=4, material=steel).shift_section(
            x_offset=46 + idx * 908 / 6, y_offset=740
        )
        geom = (geom - bar) + bar

    # bot bars
    for idx in range(3):
        bar = circular_section_by_area(area=620, n=4, material=steel).shift_section(
            x_offset=394 + idx * 212 / 2, y_offset=60
        )
        geom = (geom - bar) + bar

    conc_sec = ConcreteSection(geom)

    # TODO: cracked neutral axis, cracking second moment of area
def test_example_3_8():
    """Warner et al. (2007) Example 3.8: ultimate bending capacity of a
    300x450 rectangular beam with 4N24 bottom bars (no compression steel)."""
    concrete = Concrete(
        name="32 MPa Concrete",
        elastic_modulus=30.1e3,
        density=2.4e-6,
        ultimate_stress_strain_profile=WhitneyStressBlock(
            alpha_2=0.85,
            gamma=0.83,
            compressive_strength=32,
            ultimate_strain=0.003,
        ),
        alpha_1=0.85,
        flexural_tensile_strength=3.4,
        residual_shrinkage_stress=2.4,
        colour="lightgrey",
    )

    steel = Steel(
        name="500 MPa Steel",
        elastic_modulus=200e3,
        density=7.85e-6,
        yield_strength=500,
        ultimate_stress_strain_profile=SteelElasticPlastic(
            yield_strength=500,
            elastic_modulus=200e3,
            fracture_strain=0.05,
        ),
        colour="grey",
    )

    geometry = concrete_rectangular_section(
        b=300,
        d=450,
        dia_top=24,
        n_top=0,
        dia_bot=24,
        n_bot=4,
        n_circle=4,
        cover=48,
        area_top=0,
        area_bot=450,
        conc_mat=concrete,
        steel_mat=steel,
    )

    conc_sec = ConcreteSection(geometry)
    n, mx, my, mv, d_n = conc_sec.ultimate_bending_capacity()

    # Textbook answers: neutral axis depth 133 mm, Mu = 302 kNm.
    assert pytest.approx(d_n, abs=1) == 133
    assert pytest.approx(mx, rel=0.01) == 302e6
def test_example_3_9():
    """Warner et al. (2007) Example 3.9: ultimate bending capacity of a
    doubly reinforced 300x450 beam (2N24 top, 4N24 bottom)."""
    concrete = Concrete(
        name="32 MPa Concrete",
        elastic_modulus=30.1e3,
        density=2.4e-6,
        ultimate_stress_strain_profile=WhitneyStressBlock(
            alpha_2=0.85,
            gamma=0.83,
            compressive_strength=32,
            ultimate_strain=0.003,
        ),
        alpha_1=0.85,
        flexural_tensile_strength=3.4,
        residual_shrinkage_stress=2.4,
        colour="lightgrey",
    )

    steel = Steel(
        name="500 MPa Steel",
        elastic_modulus=200e3,
        density=7.85e-6,
        yield_strength=500,
        ultimate_stress_strain_profile=SteelElasticPlastic(
            yield_strength=500,
            elastic_modulus=200e3,
            fracture_strain=0.05,
        ),
        colour="grey",
    )

    geometry = concrete_rectangular_section(
        b=300,
        d=450,
        dia_top=24,
        n_top=2,
        dia_bot=24,
        n_bot=4,
        n_circle=4,
        cover=48,
        area_top=450,
        area_bot=450,
        conc_mat=concrete,
        steel_mat=steel,
    )

    conc_sec = ConcreteSection(geometry)
    n, mx, my, mv, d_n = conc_sec.ultimate_bending_capacity()

    # Textbook answers: neutral axis depth 100.7 mm, Mu = 309 kNm.
    assert pytest.approx(d_n, rel=0.03) == 100.7
    assert pytest.approx(mx, rel=0.01) == 309e6
def test_example_3_11():
    """Warner et al. (2007) Example 3.11: ultimate bending capacity of a
    tee beam (1100x120 flange, 400 web, 726 deep) with two rows of 4 x
    800 mm^2 bottom bars, 25 MPa concrete.

    The two identical bar-row loops (differing only in y_offset) are folded
    into a nested helper; the geometry produced is unchanged.
    """
    concrete = Concrete(
        name="25 MPa Concrete",
        elastic_modulus=26.7e3,
        density=2.4e-6,
        ultimate_stress_strain_profile=WhitneyStressBlock(
            alpha_2=0.85,
            gamma=0.85,
            compressive_strength=25,
            ultimate_strain=0.003,
        ),
        alpha_1=0.85,
        flexural_tensile_strength=0,
        residual_shrinkage_stress=0,
        colour="lightgrey",
    )

    steel = Steel(
        name="500 MPa Steel",
        elastic_modulus=200e3,
        density=7.85e-6,
        yield_strength=500,
        ultimate_stress_strain_profile=SteelElasticPlastic(
            yield_strength=500,
            elastic_modulus=200e3,
            fracture_strain=0.05,
        ),
        colour="grey",
    )

    # Tee generated without bars; bars are embedded manually below.
    geom = concrete_tee_section(
        b=400,
        d=726,
        b_f=1100,
        d_f=120,
        dia_top=20,
        n_top=0,
        dia_bot=28,
        n_bot=0,
        n_circle=4,
        cover=30,
        conc_mat=concrete,
        steel_mat=steel,
    )

    def add_bottom_bar_row(geometry, y_offset):
        """Cut-and-embed a row of 4 x 800 mm^2 bars at the given height."""
        for idx in range(4):
            bar = circular_section_by_area(area=800, n=4, material=steel).shift_section(
                x_offset=46 + idx * 308 / 3, y_offset=y_offset
            )
            geometry = (geometry - bar) + bar
        return geometry

    geom = add_bottom_bar_row(geom, 46)
    geom = add_bottom_bar_row(geom, 106)

    conc_sec = ConcreteSection(geom)
    n, mx, my, mv, d_n = conc_sec.ultimate_bending_capacity()

    # Textbook answers: neutral axis depth 196 mm, Mu = 1860 kNm.
    assert pytest.approx(d_n, abs=1) == 196
    assert pytest.approx(mx, rel=0.01) == 1860e6
def test_example_3_14():
    """Warner et al. (2007) Example 3.14 — placeholder, not yet implemented."""
    pass

    # TODO: implement!
| 26.162437 | 116 | 0.588766 | 1,300 | 10,308 | 4.428462 | 0.14 | 0.048636 | 0.042904 | 0.056279 | 0.865208 | 0.865208 | 0.865208 | 0.850096 | 0.816745 | 0.796943 | 0 | 0.093459 | 0.311797 | 10,308 | 393 | 117 | 26.229008 | 0.718072 | 0.041036 | 0 | 0.776074 | 0 | 0 | 0.024921 | 0 | 0 | 0 | 0 | 0.002545 | 0.03681 | 1 | 0.021472 | false | 0.003067 | 0.018405 | 0 | 0.039877 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f5c3fa8031d960b2bca0cd823ecb978917ac9d18 | 17,080 | py | Python | projects/Python/tests/test_enums.py | kokizzu/FastBinaryEncoding | eec54ef2fb83de0bb24cc33a591f9896d360f23c | [
"MIT"
] | 563 | 2018-09-04T23:52:42.000Z | 2022-03-24T01:35:50.000Z | projects/Python/tests/test_enums.py | kokizzu/FastBinaryEncoding | eec54ef2fb83de0bb24cc33a591f9896d360f23c | [
"MIT"
] | 44 | 2018-12-04T11:13:34.000Z | 2022-03-01T00:22:05.000Z | projects/Python/tests/test_enums.py | kokizzu/FastBinaryEncoding | eec54ef2fb83de0bb24cc33a591f9896d360f23c | [
"MIT"
] | 69 | 2018-11-06T12:15:39.000Z | 2022-02-10T13:51:01.000Z | import fbe
from proto import enums
from unittest import TestCase
class TestEnums(TestCase):
def test_serialization_enums(self):
enums1 = enums.Enums()
# Serialize enums to the FBE stream
writer = enums.EnumsModel(fbe.WriteBuffer())
self.assertEqual(writer.model.fbe_offset, 4)
serialized = writer.serialize(enums1)
self.assertEqual(serialized, writer.buffer.size)
self.assertTrue(writer.verify())
writer.next(serialized)
self.assertEqual(writer.model.fbe_offset, (4 + writer.buffer.size))
# Check the serialized FBE size
self.assertEqual(writer.buffer.size, 232)
# Deserialize enums from the FBE stream
enums2 = enums.Enums()
reader = enums.EnumsModel(fbe.ReadBuffer())
self.assertEqual(reader.model.fbe_offset, 4)
reader.attach_buffer(writer.buffer)
self.assertTrue(reader.verify())
(enums2, deserialized) = reader.deserialize(enums2)
self.assertEqual(deserialized, reader.buffer.size)
reader.next(deserialized)
self.assertEqual(reader.model.fbe_offset, (4 + reader.buffer.size))
self.assertEqual(enums2.byte0, enums.EnumByte.ENUM_VALUE_0)
self.assertEqual(enums2.byte1, enums.EnumByte.ENUM_VALUE_1)
self.assertEqual(enums2.byte2, enums.EnumByte.ENUM_VALUE_2)
self.assertEqual(enums2.byte3, enums.EnumByte.ENUM_VALUE_3)
self.assertEqual(enums2.byte4, enums.EnumByte.ENUM_VALUE_4)
self.assertEqual(enums2.byte5, enums1.byte3)
self.assertEqual(enums2.char0, enums.EnumChar.ENUM_VALUE_0)
self.assertEqual(enums2.char1, enums.EnumChar.ENUM_VALUE_1)
self.assertEqual(enums2.char2, enums.EnumChar.ENUM_VALUE_2)
self.assertEqual(enums2.char3, enums.EnumChar.ENUM_VALUE_3)
self.assertEqual(enums2.char4, enums.EnumChar.ENUM_VALUE_4)
self.assertEqual(enums2.char5, enums1.char3)
self.assertEqual(enums2.wchar0, enums.EnumWChar.ENUM_VALUE_0)
self.assertEqual(enums2.wchar1, enums.EnumWChar.ENUM_VALUE_1)
self.assertEqual(enums2.wchar2, enums.EnumWChar.ENUM_VALUE_2)
self.assertEqual(enums2.wchar3, enums.EnumWChar.ENUM_VALUE_3)
self.assertEqual(enums2.wchar4, enums.EnumWChar.ENUM_VALUE_4)
self.assertEqual(enums2.wchar5, enums1.wchar3)
self.assertEqual(enums2.int8b0, enums.EnumInt8.ENUM_VALUE_0)
self.assertEqual(enums2.int8b1, enums.EnumInt8.ENUM_VALUE_1)
self.assertEqual(enums2.int8b2, enums.EnumInt8.ENUM_VALUE_2)
self.assertEqual(enums2.int8b3, enums.EnumInt8.ENUM_VALUE_3)
self.assertEqual(enums2.int8b4, enums.EnumInt8.ENUM_VALUE_4)
self.assertEqual(enums2.int8b5, enums1.int8b3)
self.assertEqual(enums2.uint8b0, enums.EnumUInt8.ENUM_VALUE_0)
self.assertEqual(enums2.uint8b1, enums.EnumUInt8.ENUM_VALUE_1)
self.assertEqual(enums2.uint8b2, enums.EnumUInt8.ENUM_VALUE_2)
self.assertEqual(enums2.uint8b3, enums.EnumUInt8.ENUM_VALUE_3)
self.assertEqual(enums2.uint8b4, enums.EnumUInt8.ENUM_VALUE_4)
self.assertEqual(enums2.uint8b5, enums1.uint8b3)
self.assertEqual(enums2.int16b0, enums.EnumInt16.ENUM_VALUE_0)
self.assertEqual(enums2.int16b1, enums.EnumInt16.ENUM_VALUE_1)
self.assertEqual(enums2.int16b2, enums.EnumInt16.ENUM_VALUE_2)
self.assertEqual(enums2.int16b3, enums.EnumInt16.ENUM_VALUE_3)
self.assertEqual(enums2.int16b4, enums.EnumInt16.ENUM_VALUE_4)
self.assertEqual(enums2.int16b5, enums1.int16b3)
self.assertEqual(enums2.uint16b0, enums.EnumUInt16.ENUM_VALUE_0)
self.assertEqual(enums2.uint16b1, enums.EnumUInt16.ENUM_VALUE_1)
self.assertEqual(enums2.uint16b2, enums.EnumUInt16.ENUM_VALUE_2)
self.assertEqual(enums2.uint16b3, enums.EnumUInt16.ENUM_VALUE_3)
self.assertEqual(enums2.uint16b4, enums.EnumUInt16.ENUM_VALUE_4)
self.assertEqual(enums2.uint16b5, enums1.uint16b3)
self.assertEqual(enums2.int32b0, enums.EnumInt32.ENUM_VALUE_0)
self.assertEqual(enums2.int32b1, enums.EnumInt32.ENUM_VALUE_1)
self.assertEqual(enums2.int32b2, enums.EnumInt32.ENUM_VALUE_2)
self.assertEqual(enums2.int32b3, enums.EnumInt32.ENUM_VALUE_3)
self.assertEqual(enums2.int32b4, enums.EnumInt32.ENUM_VALUE_4)
self.assertEqual(enums2.int32b5, enums1.int32b3)
self.assertEqual(enums2.uint32b0, enums.EnumUInt32.ENUM_VALUE_0)
self.assertEqual(enums2.uint32b1, enums.EnumUInt32.ENUM_VALUE_1)
self.assertEqual(enums2.uint32b2, enums.EnumUInt32.ENUM_VALUE_2)
self.assertEqual(enums2.uint32b3, enums.EnumUInt32.ENUM_VALUE_3)
self.assertEqual(enums2.uint32b4, enums.EnumUInt32.ENUM_VALUE_4)
self.assertEqual(enums2.uint32b5, enums1.uint32b3)
self.assertEqual(enums2.int64b0, enums.EnumInt64.ENUM_VALUE_0)
self.assertEqual(enums2.int64b1, enums.EnumInt64.ENUM_VALUE_1)
self.assertEqual(enums2.int64b2, enums.EnumInt64.ENUM_VALUE_2)
self.assertEqual(enums2.int64b3, enums.EnumInt64.ENUM_VALUE_3)
self.assertEqual(enums2.int64b4, enums.EnumInt64.ENUM_VALUE_4)
self.assertEqual(enums2.int64b5, enums1.int64b3)
self.assertEqual(enums2.uint64b0, enums.EnumUInt64.ENUM_VALUE_0)
self.assertEqual(enums2.uint64b1, enums.EnumUInt64.ENUM_VALUE_1)
self.assertEqual(enums2.uint64b2, enums.EnumUInt64.ENUM_VALUE_2)
self.assertEqual(enums2.uint64b3, enums.EnumUInt64.ENUM_VALUE_3)
self.assertEqual(enums2.uint64b4, enums.EnumUInt64.ENUM_VALUE_4)
self.assertEqual(enums2.uint64b5, enums1.uint64b3)
def test_serialization_final_enums(self):
    """Round-trip a default Enums struct through the FBE *final* model and
    verify that every enum field survives serialization/deserialization."""
    enums1 = enums.Enums()
    # Serialize enums to the FBE stream
    writer = enums.EnumsFinalModel(fbe.WriteBuffer())
    serialized = writer.serialize(enums1)
    self.assertEqual(serialized, writer.buffer.size)
    self.assertTrue(writer.verify())
    writer.next(serialized)
    # Check the serialized FBE size
    self.assertEqual(writer.buffer.size, 224)
    # Deserialize enums from the FBE stream
    enums2 = enums.Enums()
    reader = enums.EnumsFinalModel(fbe.ReadBuffer())
    reader.attach_buffer(writer.buffer)
    self.assertTrue(reader.verify())
    (enums2, deserialized) = reader.deserialize(enums2)
    self.assertEqual(deserialized, reader.buffer.size)
    reader.next(deserialized)
    # Every enum family follows the same layout: fields <prefix>0..<prefix>4
    # hold ENUM_VALUE_0..ENUM_VALUE_4 and <prefix>5 mirrors enums1.<prefix>3.
    families = [
        ("byte", enums.EnumByte),
        ("char", enums.EnumChar),
        ("wchar", enums.EnumWChar),
        ("int8b", enums.EnumInt8),
        ("uint8b", enums.EnumUInt8),
        ("int16b", enums.EnumInt16),
        ("uint16b", enums.EnumUInt16),
        ("int32b", enums.EnumInt32),
        ("uint32b", enums.EnumUInt32),
        ("int64b", enums.EnumInt64),
        ("uint64b", enums.EnumUInt64),
    ]
    for prefix, enum_type in families:
        for index in range(5):
            self.assertEqual(getattr(enums2, prefix + str(index)),
                             getattr(enum_type, "ENUM_VALUE_" + str(index)))
        self.assertEqual(getattr(enums2, prefix + "5"), getattr(enums1, prefix + "3"))
def test_serialization_json_enums(self):
    """Round-trip an Enums struct through its JSON representation and
    verify that every enum field is restored to the expected value."""
    # Source JSON document holding the expected wire value of every field.
    json_str = r'{"byte0":0,"byte1":0,"byte2":1,"byte3":254,"byte4":255,"byte5":254,"char0":0,"char1":49,"char2":50,"char3":51,"char4":52,"char5":51,"wchar0":0,"wchar1":1092,"wchar2":1093,"wchar3":1365,"wchar4":1366,"wchar5":1365,"int8b0":0,"int8b1":-128,"int8b2":-127,"int8b3":126,"int8b4":127,"int8b5":126,"uint8b0":0,"uint8b1":0,"uint8b2":1,"uint8b3":254,"uint8b4":255,"uint8b5":254,"int16b0":0,"int16b1":-32768,"int16b2":-32767,"int16b3":32766,"int16b4":32767,"int16b5":32766,"uint16b0":0,"uint16b1":0,"uint16b2":1,"uint16b3":65534,"uint16b4":65535,"uint16b5":65534,"int32b0":0,"int32b1":-2147483648,"int32b2":-2147483647,"int32b3":2147483646,"int32b4":2147483647,"int32b5":2147483646,"uint32b0":0,"uint32b1":0,"uint32b2":1,"uint32b3":4294967294,"uint32b4":4294967295,"uint32b5":4294967294,"int64b0":0,"int64b1":-9223372036854775807,"int64b2":-9223372036854775806,"int64b3":9223372036854775806,"int64b4":9223372036854775807,"int64b5":9223372036854775806,"uint64b0":0,"uint64b1":0,"uint64b2":1,"uint64b3":18446744073709551614,"uint64b4":18446744073709551615,"uint64b5":18446744073709551614}'
    # Create enums from the source JSON string
    enums1 = enums.Enums.from_json(json_str)
    # Serialize enums back to a JSON string
    json_str = enums1.to_json()
    # Check the serialized JSON size
    self.assertGreater(len(json_str), 0)
    # Deserialize enums from the JSON string
    enums2 = enums.Enums.from_json(json_str)
    # Fields <prefix>0..<prefix>4 hold ENUM_VALUE_0..ENUM_VALUE_4 and
    # <prefix>5 mirrors enums1.<prefix>3.
    families = [
        ("byte", enums.EnumByte),
        ("char", enums.EnumChar),
        ("wchar", enums.EnumWChar),
        ("int8b", enums.EnumInt8),
        ("uint8b", enums.EnumUInt8),
        ("int16b", enums.EnumInt16),
        ("uint16b", enums.EnumUInt16),
        ("int32b", enums.EnumInt32),
        ("uint32b", enums.EnumUInt32),
        ("int64b", enums.EnumInt64),
        ("uint64b", enums.EnumUInt64),
    ]
    for prefix, enum_type in families:
        for index in range(5):
            self.assertEqual(getattr(enums2, prefix + str(index)),
                             getattr(enum_type, "ENUM_VALUE_" + str(index)))
        self.assertEqual(getattr(enums2, prefix + "5"), getattr(enums1, prefix + "3"))
| 56.556291 | 1,094 | 0.739227 | 2,089 | 17,080 | 5.877932 | 0.07707 | 0.254092 | 0.338627 | 0.037625 | 0.89763 | 0.894047 | 0.894047 | 0.888183 | 0.883052 | 0.883052 | 0 | 0.114933 | 0.152342 | 17,080 | 301 | 1,095 | 56.744186 | 0.733181 | 0.022073 | 0 | 0.909091 | 0 | 0.004132 | 0.064466 | 0.064466 | 0 | 0 | 0 | 0 | 0.880165 | 1 | 0.012397 | false | 0 | 0.012397 | 0 | 0.028926 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
de1a2ee3a1122f81b5e65ef94c2f45fecdea020c | 37,764 | py | Python | web/transiq/restapi/serializers/employee.py | manibhushan05/transiq | 763fafb271ce07d13ac8ce575f2fee653cf39343 | [
"Apache-2.0"
] | null | null | null | web/transiq/restapi/serializers/employee.py | manibhushan05/transiq | 763fafb271ce07d13ac8ce575f2fee653cf39343 | [
"Apache-2.0"
] | 14 | 2020-06-05T23:06:45.000Z | 2022-03-12T00:00:18.000Z | web/transiq/restapi/serializers/employee.py | manibhushan05/transiq | 763fafb271ce07d13ac8ce575f2fee653cf39343 | [
"Apache-2.0"
] | null | null | null | from django.contrib.auth.models import User
from rest_framework import serializers, ISO_8601
from rest_framework.validators import UniqueValidator, UniqueTogetherValidator
from api.utils import get_or_none
from employee.models import Employee, Designation, Department, FitnessDetail, PastEmployment, PermanentAddress, \
Referral, EmploymentAgency, CurrentEmploymentDetails, EducationalDegree, CertificationCourse, SkillSet, Nominee, \
LeaveRecord, Salary, Task, TaskEmail
from restapi.helper_api import DATE_FORMAT, DATETIME_FORMAT
from restapi.serializers.authentication import UserSerializer, BankSerializer
from restapi.serializers.utils import CitySerializer, AahoOfficeSerializer
from restapi.service.validators import validate_mobile_number
from utils.models import City, AahoOffice, Bank
class DesignationSerializer(serializers.Serializer):
    """Serializer for employee Designation records.

    Field declaration order is preserved because it determines key order in
    the serialized output. Shares the module-wide audit fields (created_on,
    updated_on, deleted, deleted_on, created_by, changed_by).
    """
    id = serializers.IntegerField(label='ID', read_only=True)
    # Designation names must be unique across the table.
    name = serializers.CharField(max_length=100, validators=[UniqueValidator(queryset=Designation.objects.all())])
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")

    def create(self, validated_data):
        """Insert and return a new Designation row."""
        instance = Designation.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        """Bulk-update via the queryset (note: bypasses model.save() and
        signals), then re-fetch the row so fresh values are returned."""
        Designation.objects.filter(id=instance.id).update(**validated_data)
        return Designation.objects.get(id=instance.id)
class DepartmentSerializer(serializers.Serializer):
    """Serializer for employee Department records.

    Mirrors DesignationSerializer: unique name plus the module-wide audit
    fields. Field order is preserved (it fixes serialized key order).
    """
    id = serializers.IntegerField(label='ID', read_only=True)
    # Department names must be unique across the table.
    name = serializers.CharField(max_length=100, validators=[UniqueValidator(queryset=Department.objects.all())])
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")

    def create(self, validated_data):
        """Insert and return a new Department row."""
        instance = Department.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        """Bulk-update via the queryset (bypasses model.save()/signals),
        then re-fetch the row."""
        Department.objects.filter(id=instance.id).update(**validated_data)
        return Department.objects.get(id=instance.id)
class FitnessDetailSerializer(serializers.Serializer):
    """Serializer for an employee's fitness / medical details, including
    emergency-contact information."""
    id = serializers.IntegerField(label='ID', read_only=True)
    height = serializers.DecimalField(decimal_places=2, help_text='Height in Inches', max_digits=8, required=False)
    weight = serializers.DecimalField(decimal_places=2, help_text='Weight in Kgs', max_digits=8, required=False)
    # Closed set of blood groups; stored as the slug, rendered as the label.
    blood_group = serializers.ChoiceField(allow_blank=True, allow_null=True, choices=(
        ('o_pos', 'O+'), ('o_neg', 'O-'), ('a_pos', 'A+'), ('a_neg', 'A-'), ('b_pos', 'B+'), ('b_neg', 'B-'),
        ('ab_pos', 'AB+'), ('ab_neg', 'AB-')), required=False)
    medical_fitness = serializers.CharField(allow_blank=True, allow_null=True, required=False,
                                            style={'base_template': 'textarea.html'})
    emergency_contact_person_name = serializers.CharField(allow_blank=True, allow_null=True, max_length=35,
                                                          required=False)
    # NOTE(review): unlike the address/referral serializers, the emergency
    # phone has no mobile-number validation here.
    emergency_contact_person_phone = serializers.CharField(allow_blank=True, allow_null=True, max_length=15,
                                                           required=False)
    emergency_contact_person_email = serializers.EmailField(allow_blank=True, allow_null=True, max_length=35,
                                                            required=False)
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")

    def create(self, validated_data):
        """Insert and return a new FitnessDetail row."""
        instance = FitnessDetail.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        """Bulk-update via the queryset (bypasses model.save()/signals),
        then re-fetch the row."""
        FitnessDetail.objects.filter(id=instance.id).update(**validated_data)
        return FitnessDetail.objects.get(id=instance.id)
class PastEmploymentSerializer(serializers.Serializer):
    """Serializer for a prior employment record of an employee.

    Dates accept/emit the project's DATE_FORMAT (plus ISO 8601 on input);
    timestamps are rendered with DATETIME_FORMAT.
    """
    id = serializers.IntegerField(label='ID', read_only=True)
    joining_date = serializers.DateField(allow_null=True, required=False, input_formats=[DATE_FORMAT, ISO_8601],
                                         format=DATE_FORMAT)
    leaving_date = serializers.DateField(allow_null=True, required=False, input_formats=[DATE_FORMAT, ISO_8601],
                                         format=DATE_FORMAT)
    organisation = serializers.CharField(allow_blank=True, allow_null=True, max_length=100, required=False)
    designation = serializers.CharField(allow_blank=True, allow_null=True, max_length=100, required=False)
    reporting_manager = serializers.CharField(allow_blank=True, allow_null=True, max_length=100, required=False)
    # Free text rather than a numeric field (e.g. "12 LPA") — kept as CharField.
    gross_compensation = serializers.CharField(allow_blank=True, allow_null=True, max_length=20, required=False)
    reason_for_change = serializers.CharField(allow_blank=True, allow_null=True, required=False,
                                              style={'base_template': 'textarea.html'})
    total_experience = serializers.CharField(allow_blank=True, allow_null=True, help_text='In year and months',
                                             max_length=20,
                                             required=False)
    relevant_experience = serializers.CharField(allow_blank=True, allow_null=True, help_text='In year and months',
                                                max_length=20,
                                                required=False)
    created_on = serializers.DateTimeField(read_only=True, format=DATETIME_FORMAT)
    updated_on = serializers.DateTimeField(read_only=True, format=DATETIME_FORMAT)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")

    def create(self, validated_data):
        """Insert and return a new PastEmployment row."""
        instance = PastEmployment.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        """Bulk-update via the queryset (bypasses model.save()/signals),
        then re-fetch the row."""
        PastEmployment.objects.filter(id=instance.id).update(**validated_data)
        return PastEmployment.objects.get(id=instance.id)
class PermanentAddressSerializer(serializers.Serializer):
    """Serializer for an employee's permanent address.

    ``city`` accepts a City primary key on write and is rendered as a nested
    object (CitySerializer) on read — see to_representation().
    """
    id = serializers.IntegerField(label='ID', read_only=True)
    address = serializers.CharField(allow_blank=True, allow_null=True, max_length=200, required=False)
    pin = serializers.CharField(allow_blank=True, allow_null=True, max_length=8, required=False)
    phone = serializers.CharField(allow_blank=True, allow_null=True, max_length=15, required=False)
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    city = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=City.objects.all(), required=False)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")

    def to_representation(self, instance):
        # Swap the write-side pk field for a nested read-only serializer so
        # reads return the full city object.
        self.fields['city'] = CitySerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        """Insert and return a new PermanentAddress row."""
        instance = PermanentAddress.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        """Bulk-update via the queryset (bypasses model.save()/signals),
        then re-fetch the row."""
        PermanentAddress.objects.filter(id=instance.id).update(**validated_data)
        return PermanentAddress.objects.get(id=instance.id)

    def validate_phone(self, value):
        """Reject a non-empty phone that is not a valid mobile number.

        Fix: test ``value`` first so empty/None values short-circuit and are
        never handed to validate_mobile_number() (the original called the
        validator before checking for an empty value).
        """
        if value and not validate_mobile_number(value):
            raise serializers.ValidationError("Not a valid mobile number")
        return value
class ReferralSerializer(serializers.Serializer):
    """Serializer for an employee's referral contact (a person who can
    vouch for the employee)."""
    id = serializers.IntegerField(label='ID', read_only=True)
    name = serializers.CharField(allow_blank=True, allow_null=True, max_length=100, required=False)
    organisation = serializers.CharField(allow_blank=True, allow_null=True, max_length=100, required=False)
    designation = serializers.CharField(allow_blank=True, allow_null=True, max_length=100, required=False)
    phone = serializers.CharField(allow_blank=True, allow_null=True, max_length=15, required=False)
    address = serializers.CharField(allow_blank=True, allow_null=True, required=False)
    email = serializers.EmailField(allow_blank=True, allow_null=True, max_length=35, required=False)
    nature_of_assisstance = serializers.CharField(allow_blank=True, allow_null=True, required=False,
                                                  style={'base_template': 'textarea.html'})
    number_of_year = serializers.CharField(allow_blank=True, allow_null=True, max_length=5, required=False)
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")

    def create(self, validated_data):
        """Insert and return a new Referral row."""
        instance = Referral.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        """Bulk-update via the queryset (bypasses model.save()/signals),
        then re-fetch the row."""
        Referral.objects.filter(id=instance.id).update(**validated_data)
        return Referral.objects.get(id=instance.id)

    def validate_phone(self, value):
        """Reject a non-empty phone that is not a valid mobile number.

        Fix: test ``value`` first so empty/None values short-circuit and are
        never handed to validate_mobile_number() (the original called the
        validator before checking for an empty value).
        """
        if value and not validate_mobile_number(value):
            raise serializers.ValidationError("Not a valid mobile number")
        return value
class EmploymentAgencySerializer(serializers.Serializer):
    """Serializer for an employment agency through which an employee was
    sourced."""
    id = serializers.IntegerField(label='ID', read_only=True)
    agency_name = serializers.CharField(allow_blank=True, allow_null=True, max_length=70, required=False)
    contact_name = serializers.CharField(allow_blank=True, allow_null=True, max_length=35, required=False)
    # NOTE(review): no mobile-number validation here, unlike the address and
    # referral serializers — confirm whether that is intentional.
    phone = serializers.CharField(allow_blank=True, allow_null=True, max_length=15, required=False)
    email = serializers.EmailField(allow_blank=True, allow_null=True, max_length=50, required=False)
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")

    def create(self, validated_data):
        """Insert and return a new EmploymentAgency row."""
        instance = EmploymentAgency.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        """Bulk-update via the queryset (bypasses model.save()/signals),
        then re-fetch the row."""
        EmploymentAgency.objects.filter(id=instance.id).update(**validated_data)
        return EmploymentAgency.objects.get(id=instance.id)
class EmployeeSerializer(serializers.Serializer):
    """Serializer for Employee records.

    Related objects (office, designation, department, fitness, bank,
    past employment, referral, permanent address, agency) are accepted as
    primary keys on write; several read-only summary fields (emp_name,
    emp_phone, emp_alt_phone, emp_email, bank_account) are derived from the
    model / linked User. ``office_multiple`` is a required many-to-many
    relation and is handled separately in create()/update() because M2M
    values cannot be passed to Model.objects.create()/queryset.update().
    """
    id = serializers.IntegerField(label='ID', read_only=True)
    employee_id = serializers.CharField(max_length=35,
                                        validators=[UniqueValidator(queryset=Employee.objects.all())])
    reporting_person = serializers.CharField(allow_blank=True, allow_null=True, max_length=35, required=False)
    date_of_birth = serializers.DateField(
        allow_null=True, required=False, input_formats=[DATE_FORMAT, ISO_8601], format=DATE_FORMAT)
    date_of_joining = serializers.DateField(
        allow_null=True, required=False, input_formats=[DATE_FORMAT, ISO_8601], format=DATE_FORMAT)
    date_of_leaving = serializers.DateField(
        allow_null=True, required=False, input_formats=[DATE_FORMAT, ISO_8601], format=DATE_FORMAT)
    gender = serializers.ChoiceField(allow_null=True, choices=(('male', 'Male'), ('female', 'Female')), required=False)
    pan = serializers.CharField(allow_blank=True, allow_null=True, max_length=20, required=False)
    aadhaar = serializers.CharField(allow_blank=True, allow_null=True, max_length=20, required=False)
    passport = serializers.CharField(allow_blank=True, allow_null=True, max_length=20, required=False)
    marital_status = serializers.ChoiceField(
        allow_blank=True, allow_null=True,
        choices=(('married', 'Married'), ('unmarried', 'Unmarried'), ('divorcee', 'Divorcee')), required=False
    )
    status = serializers.ChoiceField(allow_null=True, choices=(('active', 'Active'), ('inactive', 'Inactive')),
                                     required=False)
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    office = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=AahoOffice.objects.all(), required=False)
    # One employee per user account.
    username = serializers.SlugRelatedField(queryset=User.objects.all(), required=False,
                                            validators=[UniqueValidator(queryset=Employee.objects.all())],
                                            slug_field="username")
    designation = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=Designation.objects.all(),
                                                     required=False)
    department = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=Department.objects.all(), required=False)
    fitness_details = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=FitnessDetail.objects.all(),
                                                         required=False)
    bank = serializers.PrimaryKeyRelatedField(
        allow_null=True, queryset=Bank.objects.all(), required=False,
        validators=[UniqueValidator(queryset=Employee.objects.all())])
    past_employment = serializers.PrimaryKeyRelatedField(
        allow_null=True, queryset=PastEmployment.objects.all(), required=False,
        validators=[UniqueValidator(queryset=Employee.objects.all())]
    )
    referral = serializers.PrimaryKeyRelatedField(
        write_only=True, allow_null=True, queryset=Referral.objects.all(), required=False,
        validators=[UniqueValidator(queryset=Employee.objects.all())]
    )
    permanent_address = serializers.PrimaryKeyRelatedField(
        write_only=True, allow_null=True, queryset=PermanentAddress.objects.all(), required=False,
        validators=[UniqueValidator(queryset=Employee.objects.all())]
    )
    employment_agency = serializers.PrimaryKeyRelatedField(write_only=True, allow_null=True,
                                                           queryset=EmploymentAgency.objects.all(),
                                                           required=False)
    # Required M2M: at least one office must be supplied (allow_empty=False).
    office_multiple = serializers.PrimaryKeyRelatedField(write_only=True, allow_empty=False, many=True,
                                                         queryset=AahoOffice.objects.all())
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    emp_name = serializers.SerializerMethodField()
    emp_phone = serializers.SerializerMethodField()
    emp_alt_phone = serializers.SerializerMethodField()
    emp_email = serializers.SerializerMethodField()
    bank_account = serializers.SerializerMethodField()

    def get_bank_account(self, instance):
        """Return the linked user's bank account serialized, or {} when the
        employee has no user or the user has no Bank row."""
        if isinstance(instance.username, User):
            bank = get_or_none(Bank, user=instance.username)
            if isinstance(bank, Bank):
                return BankSerializer(instance=bank).data
        return {}

    def get_emp_alt_phone(self, instance):
        return instance.emp_alt_phone()

    def get_emp_name(self, instance):
        return instance.emp_name()

    def get_emp_phone(self, instance):
        return instance.emp_phone()

    def get_emp_email(self, instance):
        # emp_email is accessed as an attribute/property, not called.
        return instance.emp_email

    def create(self, validated_data):
        """Create the Employee, then attach the M2M offices."""
        # Pop the M2M payload first: it cannot be passed to objects.create().
        office_multiple = validated_data.pop("office_multiple", [])
        instance = Employee.objects.create(**validated_data)
        for office in office_multiple:
            instance.office_multiple.add(office)
        return instance

    def update(self, instance, validated_data):
        """Bulk-update scalar fields (bypasses model.save()/signals) and
        replace the office set only when ``office_multiple`` was supplied."""
        office_multiple = validated_data.pop("office_multiple", None)
        if office_multiple is not None:
            # Caller supplied a new office set: drop the old one first.
            instance.office_multiple.clear()
        Employee.objects.filter(id=instance.id).update(**validated_data)
        for office in office_multiple or []:
            instance.office_multiple.add(office)
        return Employee.objects.get(id=instance.id)
class CurrentEmploymentDetailsSerializer(serializers.Serializer):
    """Serializer for an employee's current-employment details.

    ``employee``, ``designation`` and ``department`` accept primary keys on
    write and are rendered as nested objects on read (see to_representation).
    """
    id = serializers.IntegerField(label='ID', read_only=True)
    current_job_responsibilities = serializers.CharField(allow_blank=True, allow_null=True,
                                                         label='Outline briefly current job responsibilities',
                                                         required=False,
                                                         style={'base_template': 'textarea.html'})
    # Bounds match a signed 32-bit integer column.
    present_salary = serializers.IntegerField(help_text='CTC P.A.', max_value=2147483647, min_value=-2147483648,
                                              required=False)
    role = serializers.CharField(allow_blank=True, allow_null=True, max_length=200, required=False)
    e_shops = serializers.CharField(allow_blank=True, allow_null=True, max_length=50, required=False)
    # Dates stored as free-text CharFields, not DateFields — presumably
    # legacy data; TODO confirm before tightening.
    date_of_acquisition = serializers.CharField(allow_blank=True, allow_null=True, max_length=15, required=False)
    date_of_vesting = serializers.CharField(allow_blank=True, allow_null=True, max_length=15, required=False)
    date_of_selling = serializers.CharField(allow_blank=True, allow_null=True, max_length=15, required=False)
    pan = serializers.CharField(allow_blank=True, allow_null=True, max_length=15, required=False)
    id_type = serializers.CharField(allow_blank=True, allow_null=True, max_length=70, required=False)
    id_number = serializers.CharField(allow_blank=True, allow_null=True, max_length=25, required=False)
    pf_account = serializers.CharField(allow_blank=True, allow_null=True, max_length=35, required=False)
    remarks = serializers.CharField(allow_blank=True, allow_null=True, required=False,
                                    style={'base_template': 'textarea.html'})
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    employee = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=Employee.objects.all(), required=False)
    designation = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=Designation.objects.all(),
                                                     required=False)
    department = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=Department.objects.all(), required=False)

    def to_representation(self, instance):
        # Swap the write-side pk fields for nested read-only serializers.
        self.fields["employee"] = EmployeeSerializer(read_only=True)
        self.fields["designation"] = DesignationSerializer(read_only=True)
        self.fields["department"] = DepartmentSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        """Insert and return a new CurrentEmploymentDetails row."""
        instance = CurrentEmploymentDetails.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        """Bulk-update via the queryset (bypasses model.save()/signals),
        then re-fetch the row."""
        CurrentEmploymentDetails.objects.filter(id=instance.id).update(**validated_data)
        return CurrentEmploymentDetails.objects.get(id=instance.id)
class EducationalDegreeSerializer(serializers.Serializer):
    """Serializer for an employee's educational degree.

    ``employee`` accepts a primary key on write and is rendered as a nested
    object on read (see to_representation).
    """
    id = serializers.IntegerField(label='ID', read_only=True)
    course_name = serializers.CharField(allow_blank=True, allow_null=True, max_length=50, required=False)
    college_name = serializers.CharField(allow_blank=True, allow_null=True, max_length=200, required=False)
    university_name = serializers.CharField(allow_blank=True, allow_null=True, max_length=200, required=False)
    specialization = serializers.CharField(allow_blank=True, allow_null=True, max_length=200, required=False)
    marks_obtained = serializers.CharField(allow_blank=True, allow_null=True, max_length=20, required=False)
    max_marks = serializers.CharField(allow_blank=True, allow_null=True, max_length=10, required=False)
    # A full date, not just a year, per DATE_FORMAT.
    passing_year = serializers.DateField(allow_null=True, required=False, input_formats=[DATE_FORMAT, ISO_8601],
                                         format=DATE_FORMAT)
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    employee = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=Employee.objects.all(), required=False)

    def to_representation(self, instance):
        # Render the related employee as a nested object instead of a pk.
        self.fields["employee"] = EmployeeSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        """Insert and return a new EducationalDegree row."""
        instance = EducationalDegree.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        """Bulk-update via the queryset (bypasses model.save()/signals),
        then re-fetch the row."""
        EducationalDegree.objects.filter(id=instance.id).update(**validated_data)
        return EducationalDegree.objects.get(id=instance.id)
class CertificationCourseSerializer(serializers.Serializer):
    """Read/write serializer for an employee's certification-course record.

    ``employee`` is nested on output and a primary key on input.
    """

    id = serializers.IntegerField(read_only=True, label='ID')
    course_name = serializers.CharField(required=False, allow_blank=True, allow_null=True, max_length=200)
    obtained_from = serializers.CharField(required=False, allow_blank=True, allow_null=True, max_length=200)
    validity = serializers.DateField(required=False, allow_null=True,
                                     input_formats=[DATE_FORMAT, ISO_8601], format=DATE_FORMAT)
    quality = serializers.CharField(required=False, allow_blank=True, allow_null=True, max_length=20)
    # Audit/soft-delete bookkeeping shared by the other serializers in this module.
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(required=False, allow_null=True)
    created_by = serializers.SlugRelatedField(slug_field="username", required=False, queryset=User.objects.all())
    changed_by = serializers.SlugRelatedField(slug_field="username", queryset=User.objects.all())
    employee = serializers.PrimaryKeyRelatedField(required=False, allow_null=True, queryset=Employee.objects.all())

    def to_representation(self, instance):
        # Swap the PK field for a nested serializer so reads return full detail.
        self.fields["employee"] = EmployeeSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        """Create and return a new CertificationCourse row."""
        return CertificationCourse.objects.create(**validated_data)

    def update(self, instance, validated_data):
        """Bulk-update the row, then re-fetch it so the caller gets fresh state."""
        CertificationCourse.objects.filter(id=instance.id).update(**validated_data)
        return CertificationCourse.objects.get(id=instance.id)
class SkillSetSerializer(serializers.Serializer):
    """Read/write serializer for an employee's skill-set record.

    The three free-text skill fields render as textareas in the browsable API.
    """

    id = serializers.IntegerField(read_only=True, label='ID')
    technical_skill = serializers.CharField(required=False, allow_blank=True, allow_null=True,
                                            style={'base_template': 'textarea.html'})
    professional_skill = serializers.CharField(required=False, allow_blank=True, allow_null=True,
                                               style={'base_template': 'textarea.html'})
    others = serializers.CharField(required=False, allow_blank=True, allow_null=True,
                                   style={'base_template': 'textarea.html'})
    # Audit/soft-delete bookkeeping shared by the other serializers in this module.
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(required=False, allow_null=True)
    created_by = serializers.SlugRelatedField(slug_field="username", required=False, queryset=User.objects.all())
    changed_by = serializers.SlugRelatedField(slug_field="username", queryset=User.objects.all())
    employee = serializers.PrimaryKeyRelatedField(required=False, allow_null=True, queryset=Employee.objects.all())

    def to_representation(self, instance):
        # Swap the PK field for a nested serializer so reads return full detail.
        self.fields["employee"] = EmployeeSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        """Create and return a new SkillSet row."""
        return SkillSet.objects.create(**validated_data)

    def update(self, instance, validated_data):
        """Bulk-update the row, then re-fetch it so the caller gets fresh state."""
        SkillSet.objects.filter(id=instance.id).update(**validated_data)
        return SkillSet.objects.get(id=instance.id)
class NomineeSerializer(serializers.Serializer):
    """Read/write serializer for an employee's nominee record.

    ``employee`` and ``city`` are nested objects on output and primary keys
    on input. ``phone`` is validated only when a non-empty value is supplied.
    """

    id = serializers.IntegerField(label='ID', read_only=True)
    name_of_nominee = serializers.CharField(allow_blank=True, allow_null=True, max_length=35, required=False)
    relationship_with_employee = serializers.CharField(allow_blank=True, allow_null=True, max_length=35, required=False)
    type_of_nomination = serializers.CharField(allow_blank=True, allow_null=True, max_length=35, required=False)
    nominee_age = serializers.CharField(allow_blank=True, allow_null=True, max_length=10, required=False)
    percentage_share = serializers.DecimalField(decimal_places=2, help_text='Max share is 100.00', max_digits=8,
                                                required=False)
    address = serializers.CharField(allow_blank=True, allow_null=True, max_length=200, required=False)
    pin = serializers.CharField(allow_blank=True, allow_null=True, max_length=8, required=False)
    phone = serializers.CharField(allow_blank=True, allow_null=True, max_length=15, required=False)
    # Audit/soft-delete bookkeeping shared by the other serializers in this module.
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    employee = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=Employee.objects.all(), required=False)
    city = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=City.objects.all(), required=False)

    def to_representation(self, instance):
        # Swap the PK fields for nested serializers so reads return full detail.
        self.fields["employee"] = EmployeeSerializer(read_only=True)
        self.fields["city"] = CitySerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        """Create and return a new Nominee row."""
        return Nominee.objects.create(**validated_data)

    def update(self, instance, validated_data):
        """Bulk-update the row, then re-fetch it so the caller gets fresh state."""
        Nominee.objects.filter(id=instance.id).update(**validated_data)
        return Nominee.objects.get(id=instance.id)

    def validate_phone(self, value):
        """Reject non-empty phone values that fail mobile-number validation.

        Fix: check ``value`` first so blank/None values short-circuit and never
        reach validate_mobile_number (the original called the validator even
        for empty input). Truth table is otherwise unchanged.
        """
        if value and not validate_mobile_number(value):
            raise serializers.ValidationError("Not a valid mobile number")
        return value
class LeaveRecordSerializer(serializers.Serializer):
    """Read/write serializer for an employee leave record.

    ``employee`` and ``sanctioning_person`` are nested on output and
    primary keys on input. ``leave_category`` is the only mandatory field.
    """

    id = serializers.IntegerField(read_only=True, label='ID')
    leave_approval_status = serializers.ChoiceField(
        required=False,
        choices=(('Approved', 'Approved'), ('Cancelled', 'Cancelled'), ('Pending', 'Pending')))
    leave_category = serializers.ChoiceField(
        choices=(('Paid Leave', 'Paid Leave'), ('Casual Leave', 'Casual Leave'), ('Medical Leave', 'Medical Leave')))
    from_date = serializers.DateField(required=False, allow_null=True,
                                      input_formats=[DATE_FORMAT, ISO_8601], format=DATE_FORMAT)
    to_date = serializers.DateField(required=False, allow_null=True,
                                    input_formats=[DATE_FORMAT, ISO_8601], format=DATE_FORMAT)
    reason_for_leave = serializers.CharField(required=False, style={'base_template': 'textarea.html'})
    leave_balance = serializers.CharField(required=False, allow_blank=True, allow_null=True, max_length=70)
    remarks = serializers.CharField(required=False, allow_blank=True, allow_null=True,
                                    style={'base_template': 'textarea.html'})
    # Audit/soft-delete bookkeeping shared by the other serializers in this module.
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(required=False, allow_null=True)
    created_by = serializers.SlugRelatedField(slug_field="username", required=False, queryset=User.objects.all())
    changed_by = serializers.SlugRelatedField(slug_field="username", queryset=User.objects.all())
    employee = serializers.PrimaryKeyRelatedField(required=False, allow_null=True, queryset=Employee.objects.all())
    sanctioning_person = serializers.PrimaryKeyRelatedField(required=False, allow_null=True,
                                                            queryset=Employee.objects.all())

    def to_representation(self, instance):
        # Swap the PK fields for nested serializers so reads return full detail.
        self.fields["employee"] = EmployeeSerializer(read_only=True)
        self.fields["sanctioning_person"] = EmployeeSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        """Create and return a new LeaveRecord row."""
        return LeaveRecord.objects.create(**validated_data)

    def update(self, instance, validated_data):
        """Bulk-update the row, then re-fetch it so the caller gets fresh state."""
        LeaveRecord.objects.filter(id=instance.id).update(**validated_data)
        return LeaveRecord.objects.get(id=instance.id)
class SalarySerializer(serializers.Serializer):
    """Read/write serializer for an employee salary record.

    Field labels A..H mirror the payslip layout; H is derived from the
    formula A-B+C+D+E-F+G (F is the tax deduction).
    """

    id = serializers.IntegerField(label='ID', read_only=True)
    pan_number = serializers.CharField(allow_blank=True, allow_null=True, max_length=13, required=False)
    total_salary = serializers.IntegerField(allow_null=True, label='A. Total Salary', max_value=2147483647,
                                            min_value=0,
                                            required=False)
    advance_payment = serializers.IntegerField(allow_null=True, label='B. Advance', max_value=2147483647,
                                               min_value=0,
                                               required=False)
    travel_allowance = serializers.IntegerField(allow_null=True, label='C. Travel Allowance', max_value=2147483647,
                                                min_value=-2147483648, required=False)
    food_allowance = serializers.IntegerField(allow_null=True, label='D. Food Allowance', max_value=2147483647,
                                              min_value=-2147483648, required=False)
    mobile_allowance = serializers.IntegerField(allow_null=True, label='E. Mobile Allowance', max_value=2147483647,
                                                min_value=-2147483648, required=False)
    # Fix: label was a copy-paste of 'F. Mobile Allowance'; per the net_payable
    # formula '(A-B+C+D+E-F+G)', F is the tax deduction.
    tax_deduction = serializers.IntegerField(allow_null=True, label='F. Tax Deduction', max_value=2147483647,
                                             min_value=-2147483648, required=False)
    bill_submission = serializers.IntegerField(allow_null=True, label='G. Bill submission (apart from C, D, E)',
                                               max_value=2147483647, min_value=-2147483648, required=False)
    net_payable = serializers.IntegerField(allow_null=True, label='H. Net Payable (A-B+C+D+E-F+G)',
                                           max_value=2147483647,
                                           min_value=-2147483648, required=False)
    # Audit/soft-delete bookkeeping shared by the other serializers in this module.
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    employee = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=Employee.objects.all(), required=False)

    def to_representation(self, instance):
        # Swap the PK field for a nested serializer so reads return full detail.
        self.fields["employee"] = EmployeeSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        """Create and return a new Salary row."""
        return Salary.objects.create(**validated_data)

    def update(self, instance, validated_data):
        """Bulk-update the row, then re-fetch it so the caller gets fresh state."""
        Salary.objects.filter(id=instance.id).update(**validated_data)
        return Salary.objects.get(id=instance.id)
class TaskSerializer(serializers.Serializer):
    """Read/write serializer for a Task; task names must be unique."""

    id = serializers.IntegerField(read_only=True, label='ID')
    name = serializers.CharField(max_length=70, validators=[UniqueValidator(queryset=Task.objects.all())])
    # Audit/soft-delete bookkeeping shared by the other serializers in this module.
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(required=False, allow_null=True)
    created_by = serializers.SlugRelatedField(slug_field="username", required=False, queryset=User.objects.all())
    changed_by = serializers.SlugRelatedField(slug_field="username", queryset=User.objects.all())

    def create(self, validated_data):
        """Create and return a new Task row."""
        return Task.objects.create(**validated_data)

    def update(self, instance, validated_data):
        """Bulk-update the row, then re-fetch it so the caller gets fresh state."""
        Task.objects.filter(id=instance.id).update(**validated_data)
        return Task.objects.get(id=instance.id)
class TaskEmailSerializer(serializers.Serializer):
    """Read/write serializer for a TaskEmail (office + task + M2M employees).

    ``office``, ``task`` and ``employee`` are nested on output and primary
    keys on input; ``employee`` is a many-to-many list that must be non-empty
    when provided.
    """

    id = serializers.IntegerField(label='ID', read_only=True)
    # Audit/soft-delete bookkeeping shared by the other serializers in this module.
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    office = serializers.PrimaryKeyRelatedField(queryset=AahoOffice.objects.all(), required=True)
    task = serializers.PrimaryKeyRelatedField(queryset=Task.objects.all(), required=True)
    employee = serializers.PrimaryKeyRelatedField(allow_empty=False, many=True, queryset=Employee.objects.all())

    def to_representation(self, instance):
        # Swap the PK fields for nested serializers so reads return full detail.
        self.fields["office"] = AahoOfficeSerializer(read_only=True)
        self.fields["task"] = TaskSerializer(read_only=True)
        self.fields["employee"] = EmployeeSerializer(many=True, read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        """Create a TaskEmail, attaching the M2M employees after the row exists."""
        # pop() with a default replaces the 'in validated_data.keys()' dance;
        # the M2M list cannot be passed to Model.objects.create().
        employees = validated_data.pop("employee", [])
        instance = TaskEmail.objects.create(**validated_data)
        if employees:
            instance.employee.add(*employees)
        return instance

    def update(self, instance, validated_data):
        """Bulk-update scalar fields and, when supplied, replace the employee set."""
        employees = validated_data.pop("employee", None)
        # Only reset the M2M set when the caller actually sent "employee",
        # matching the original behavior of clearing before re-adding.
        if employees is not None:
            instance.employee.clear()
        TaskEmail.objects.filter(id=instance.id).update(**validated_data)
        if employees:
            instance.employee.add(*employees)
        return TaskEmail.objects.get(id=instance.id)
| 59.470866 | 120 | 0.717641 | 4,134 | 37,764 | 6.371553 | 0.071843 | 0.080448 | 0.060213 | 0.043888 | 0.844951 | 0.826576 | 0.785459 | 0.765224 | 0.751633 | 0.698254 | 0 | 0.010963 | 0.178768 | 37,764 | 634 | 121 | 59.564669 | 0.838358 | 0 | 0 | 0.56926 | 0 | 0 | 0.038132 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.096774 | false | 0.003795 | 0.018975 | 0.00759 | 0.711575 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
de703aa0d5e9216b4b2a2afc40bc0f3dce404f80 | 3,096 | py | Python | tests/vca_type_tests.py | h-medjahed/pyvcloud | 696243a7f418987936f1ddce94dfeceaa9fd04d2 | [
"Apache-2.0"
] | null | null | null | tests/vca_type_tests.py | h-medjahed/pyvcloud | 696243a7f418987936f1ddce94dfeceaa9fd04d2 | [
"Apache-2.0"
] | 3 | 2017-08-24T07:32:12.000Z | 2018-12-03T16:46:09.000Z | tests/vca_type_tests.py | useitcloud/pyvcloud | d3ed57b43d36d942edced2ee02cf6367d6826bb8 | [
"Apache-2.0"
] | 1 | 2018-07-10T13:56:58.000Z | 2018-07-10T13:56:58.000Z | from pyvcloud.vcloudair import VCA
class TestVCAType:
    """Service-type detection checks for the three vCloud endpoint flavours
    (vCloud Director standalone, vCloud Air vchs, vCloud Air vca)."""

    def _service_type(self, host):
        # Build a throwaway client against the given endpoint and classify it.
        vca = VCA(host=host, username='', verify=True, log=True)
        assert vca is not None
        return vca.get_service_type()

    def test_0001(self):
        """Identify vCloud Director Standalone"""
        assert self._service_type(
            'https://p1v21-vcd.vchs.vmware.com') == VCA.VCA_SERVICE_TYPE_STANDALONE

    def test_0002(self):
        """Identify vchs is not vCloud Director Standalone"""
        assert self._service_type(
            'https://vchs.vmware.com') != VCA.VCA_SERVICE_TYPE_STANDALONE

    def test_0003(self):
        """Identify vca is not vCloud Director Standalone"""
        assert self._service_type(
            'https://vca.vmware.com') != VCA.VCA_SERVICE_TYPE_STANDALONE

    def test_0011(self):
        """Identify vCloud Air vchs"""
        assert self._service_type(
            'https://vchs.vmware.com') == VCA.VCA_SERVICE_TYPE_VCHS

    def test_0012(self):
        """Identify vca is not vCloud Air vchs"""
        assert self._service_type(
            'https://vca.vmware.com') != VCA.VCA_SERVICE_TYPE_VCHS

    def test_0013(self):
        """Identify standalone is not vCloud Air vchs"""
        assert self._service_type(
            'https://p1v21-vcd.vchs.vmware.com') != VCA.VCA_SERVICE_TYPE_VCHS

    def test_0021(self):
        """Identify vCloud Air vca"""
        assert self._service_type(
            'https://iam.vchs.vmware.com') == VCA.VCA_SERVICE_TYPE_VCA

    def test_0022(self):
        """Identify vchs is not vCloud Air vca"""
        assert self._service_type(
            'https://vchs.vmware.com') != VCA.VCA_SERVICE_TYPE_VCA

    def test_0023(self):
        """Identify standalone is not vCloud Air vca"""
        assert self._service_type(
            'https://p1v21-vcd.vchs.vmware.com') != VCA.VCA_SERVICE_TYPE_VCA
| 34.021978 | 62 | 0.584302 | 383 | 3,096 | 4.535248 | 0.112272 | 0.227979 | 0.169257 | 0.07772 | 0.92631 | 0.92631 | 0.890616 | 0.82844 | 0.82844 | 0.75475 | 0 | 0.021097 | 0.311047 | 3,096 | 90 | 63 | 34.4 | 0.793249 | 0.108527 | 0 | 0.788732 | 0 | 0 | 0.088062 | 0 | 0 | 0 | 0 | 0 | 0.253521 | 1 | 0.126761 | false | 0 | 0.014085 | 0 | 0.15493 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
dee4b34ef730a11ee31235785a80e2be669f85a9 | 17,381 | py | Python | scripts/intf_mirror/test_intf_mirror6.py | atsgen/tf-test | 2748fcd81491450c75dadc71849d2a1c11061029 | [
"Apache-2.0"
] | 5 | 2020-09-29T00:36:57.000Z | 2022-02-16T06:51:32.000Z | scripts/intf_mirror/test_intf_mirror6.py | atsgen/tf-test | 2748fcd81491450c75dadc71849d2a1c11061029 | [
"Apache-2.0"
] | 27 | 2019-11-02T02:18:34.000Z | 2022-02-24T18:49:08.000Z | scripts/intf_mirror/test_intf_mirror6.py | atsgen/tf-test | 2748fcd81491450c75dadc71849d2a1c11061029 | [
"Apache-2.0"
] | 20 | 2019-11-28T16:02:25.000Z | 2022-01-06T05:56:58.000Z | """Intf mirroring IPv6 Regression tests."""
# Written by : ankitja@juniper.net
# Maintained by : ankitja@juniper.net
import os
import unittest
import fixtures
import testtools
import test
from common.connections import ContrailConnections
from common.contrail_test_init import ContrailTestInit
from tcutils.wrappers import preposttest_wrapper
from common.intf_mirroring.verify import VerifyIntfMirror
from .base import BaseIntfMirrorTest
class TestIntfMirror6(BaseIntfMirrorTest, VerifyIntfMirror):
    """IPv6 interface-mirroring regression suite.

    Every test is a thin wrapper that delegates to a verify_* helper in
    common.intf_mirroring.verify, varying only the placement of the source
    VM, destination VM and analyzer VM across compute nodes (cn1/cn2/cn3)
    and virtual networks (vn1/vn2/vn3), plus sub-interface variants.
    """

    @classmethod
    def setUpClass(cls):
        super(TestIntfMirror6, cls).setUpClass()

    def runTest(self):
        # Required by the test framework; individual test_* methods do the work.
        pass
    # end runTest

    @preposttest_wrapper
    def test_juniper_header6(self):
        """Validate the presence of juniper header IPv6 cases
        1) Check pkts get mirrored from both sub intf and parent intf when enabled on both
        2) Verify if the juniper header is correct
        3) Verify if the inner header is correct
        Maintainer : ankitja@juniper.net
        """
        return self.verify_juniper_header_testcase(header=2, ipv6=True)

    @preposttest_wrapper
    def test_juniper_header6_ingress(self):
        """Validate the presence of juniper header with ingress IPv6 cases
        1) Check pkts get mirrored from both sub intf and parent intf when enabled on both
        2) Verify if the juniper header is correct
        3) Verify if the inner header is correct
        Maintainer : ankitja@juniper.net
        """
        return self.verify_juniper_header_testcase(header=2, direction='ingress', ipv6=True)

    @preposttest_wrapper
    def test_juniper_header6_egress(self):
        """Validate the presence of juniper header with egress IPv6 cases
        1) Check pkts get mirrored from both sub intf and parent intf when enabled on both
        2) Verify if the juniper header is correct
        3) Verify if the inner header is correct
        Maintainer : ankitja@juniper.net
        """
        return self.verify_juniper_header_testcase(header=2, direction='egress', ipv6=True)

    @test.attr(type=['sanity'])
    @preposttest_wrapper
    def test_juniper_header6_without_header_ingress(self):
        """Validate the presence of no juniper header IPv6 cases
        1) Check pkts get mirrored from both sub intf and parent intf when enabled on both
        2) Verify if the juniper header is absent
        3) Verify if the inner header is correct
        Maintainer : ankitja@juniper.net
        """
        return self.verify_juniper_header_testcase(header=3, direction='ingress', ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn2vn1_analyzer_cn3vn1(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on different CNs, all in same VN
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn1_analyzer_on_cn3_vn1(ipv6=True)

    @test.attr(type=['cb_sanity', 'ci_sanity_WIP', 'sanity', 'quick_sanity'])
    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn2vn2_analyzer_cn3vn3(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on different CNs, all in different VNs
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn2_analyzer_on_cn3_vn3(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn2vn1_analyzer_cn3vn2(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on different CNs, src and dst in vn1, analyzer in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn1_analyzer_on_cn3_vn2(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn2vn2_analyzer_cn3vn1(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on different CNs, src and analyzer in vn1, dst in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn2_analyzer_on_cn3_vn1(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn2vn2_analyzer_cn3vn2(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on different CNs, src in vn1, dst and analyzer in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn2_analyzer_on_cn3_vn2(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_with_subintf_src_cn1vn1_dst_cn2vn1_analyzer_cn3vn1(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on different CNs, all in same VN
        when src vmi, dst vmi and analyzer vmi are sub interfaces
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn1_analyzer_on_cn3_vn1(sub_intf=True,ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_with_subintf_src_cn1vn1_dst_cn2vn1_analyzer_cn3vn2(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on different CNs, src and dst in vn1, analyzer in vn2
        when src vmi, dst vmi and analyzer vmi are sub interfaces
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn1_analyzer_on_cn3_vn2(sub_intf=True,ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn1vn1_analyzer_cn1vn1(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on same CN, all in same VN
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn1_analyzer_on_cn1_vn1(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn1vn2_analyzer_cn1vn3(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on same CN, all in different VNs
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn2_analyzer_on_cn1_vn3(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn1vn1_analyzer_cn1vn2(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on same CN, src and dst in vn1, analyzer in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn1_analyzer_on_cn1_vn2(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn1vn2_analyzer_cn1vn1(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on same CN, src and analyzer in vn1, dst in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn2_analyzer_on_cn1_vn1(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn1vn2_analyzer_cn1vn2(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on same CN, src in vn1, dst and analyzer in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn2_analyzer_on_cn1_vn2(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_with_subintf_src_cn1vn1_dst_cn1vn1_analyzer_cn1vn1(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on same CN, all in same VN
        when src vmi, dst vmi and analyzer vmi are sub interfaces
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn1_analyzer_on_cn1_vn1(sub_intf=True,ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_with_subintf_src_cn1vn1_dst_cn1vn1_analyzer_cn1vn2(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm and analyzer vm on same CN, src and dst in vn1, analyzer in vn2
        when src vmi, dst vmi and analyzer vmi are sub interfaces
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn1_analyzer_on_cn1_vn2(sub_intf=True,ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn1vn1_analyzer_cn2vn1(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm on same CN and analyzer vm on different CN, all in same VN
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn1_analyzer_on_cn2_vn1(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn1vn2_analyzer_cn2vn3(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm on same CN and analyzer vm on different CN, all in different VNs
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn2_analyzer_on_cn2_vn3(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1_vn1dst_cn1vn1_analyzer_cn2vn2(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm on same CN and analyzer vm on different CN, src and dst in vn1, analyzer in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn1_analyzer_on_cn2_vn2(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn1vn2_analyzer_cn2vn1(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm on same CN and analyzer vm on different CN, src and analyzer in vn1, dst in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn2_analyzer_on_cn2_vn1(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn1vn2_analyzer_cn2vn2(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm on same CN and analyzer vm on different CN, src in vn1, dst and analyzer in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn2_analyzer_on_cn2_vn2(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_with_subintf_src_cn1vn1_dst_cn1vn1_analyzer_cn2vn1(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm on same CN and analyzer vm on different CN, all in same VN
        when src vmi, dst vmi and analyzer vmi are sub interfaces
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn1_analyzer_on_cn2_vn1(sub_intf=True,ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_with_subintf_src_cn1vn1_dst_cn1vn1_analyzer_cn2vn2(self):
        """Validate the interface mirroring IPv6
        src vm, dst vm on same CN and analyzer vm on different CN, src and dst in vn1, analyzer in vn2
        when src vmi, dst vmi and analyzer vmi are sub interfaces
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn1_vn1_analyzer_on_cn2_vn2(sub_intf=True,ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn2vn1_analyzer_cn1vn1(self):
        """Validate the interface mirroring IPv6
        src vm, analyzer vm on same CN and dst vm on different CN, all in same VN
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn1_analyzer_on_cn1_vn1(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn2vn2_analyzer_cn1vn3(self):
        """Validate the interface mirroring IPv6
        src vm, analyzer vm on same CN and dst vm on different CN, all in different VNs
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn2_analyzer_on_cn1_vn3(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn2vn1_analyzer_cn1vn2(self):
        """Validate the interface mirroring IPv6
        src vm, analyzer vm on same CN and dst vm on different CN, src and dst in vn1, analyzer in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn1_analyzer_on_cn1_vn2(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn2vn2_analyzer_cn1vn1(self):
        """Validate the interface mirroring IPv6
        src vm, analyzer vm on same CN and dst vm on different CN, src and analyzer in vn1, dst in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn2_analyzer_on_cn1_vn1(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn1vn1_dst_cn2vn2_analyzer_cn1vn2(self):
        """Validate the interface mirroring IPv6
        src vm, analyzer vm on same CN and dst vm on different CN, src in vn1, dst and analyzer in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn2_analyzer_on_cn1_vn2(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_with_subintf_src_cn1vn1_dst_cn2vn1_analyzer_cn1vn1(self):
        """Validate the interface mirroring IPv6
        src vm, analyzer vm on same CN and dst vm on different CN, all in same VN
        when src vmi, dst vmi and analyzer vmi are sub interfaces
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn1_analyzer_on_cn1_vn1(sub_intf=True,ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_with_subintf_src_cn1vn1_dst_cn2vn1_analyzer_cn1vn2(self):
        """Validate the interface mirroring IPv6
        src vm, analyzer vm on same CN and dst vm on different CN, src and dst in vn1, analyzer in vn2
        when src vmi, dst vmi and analyzer vmi are sub interfaces
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn1_vn1_dst_on_cn2_vn1_analyzer_on_cn1_vn2(sub_intf=True,ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn2vn1_dst_cn1vn1_analyzer_cn1vn1(self):
        """Validate the interface mirroring IPv6
        dst vm, analyzer vm on same CN and src vm on different CN, all in same VN
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn2_vn1_dst_on_cn1_vn1_analyzer_on_cn1_vn1(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn2vn1_dst_cn1vn2_analyzer_cn1vn3(self):
        """Validate the interface mirroring IPv6
        dst vm, analyzer vm on same CN and src vm on different CN, all in different VNs
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn2_vn1_dst_on_cn1_vn2_analyzer_on_cn1_vn3(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn2vn1_dst_cn1vn1_analyzer_cn1vn2(self):
        """Validate the interface mirroring IPv6
        dst vm, analyzer vm on same CN and src vm on different CN, src and dst in vn1, analyzer in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn2_vn1_dst_on_cn1_vn1_analyzer_on_cn1_vn2(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn2vn1_dst_cn1vn2_analyzer_cn1vn1(self):
        """Validate the interface mirroring IPv6
        dst vm, analyzer vm on same CN and src vm on different CN, src and analyzer in vn1, dst in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn2_vn1_dst_on_cn1_vn2_analyzer_on_cn1_vn1(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_src_cn2vn1_dst_cn1vn2_analyzer_cn1vn2(self):
        """Validate the interface mirroring IPv6
        dst vm, analyzer vm on same CN and src vm on different CN, src in vn1, dst and analyzer in vn2
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn2_vn1_dst_on_cn1_vn2_analyzer_on_cn1_vn2(ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_with_subintf_src_cn2vn1_dst_cn1vn1_analyzer_cn1vn1(self):
        """Validate the interface mirroring IPv6
        dst vm, analyzer vm on same CN and src vm on different CN, all in same VN
        when src vmi, dst vmi and analyzer vmi are sub interfaces
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn2_vn1_dst_on_cn1_vn1_analyzer_on_cn1_vn1(sub_intf=True,ipv6=True)

    @preposttest_wrapper
    def test_intf_mirror6_with_subintf_src_cn2vn1_dst_cn1vn1_analyzer_cn1vn2(self):
        """Validate the interface mirroring IPv6
        dst vm, analyzer vm on same CN and src vm on different CN, src and dst in vn1, analyzer in vn2
        when src vmi, dst vmi and analyzer vmi are sub interfaces
        Maintainer : ankitja@juniper.net
        """
        return self.verify_intf_mirroring_src_on_cn2_vn1_dst_on_cn1_vn1_analyzer_on_cn1_vn2(sub_intf=True,ipv6=True)
# Allow running this test module directly (outside the regular test runner).
if __name__ == '__main__':
    unittest.main()
| 38.883669 | 116 | 0.736494 | 2,576 | 17,381 | 4.627717 | 0.04736 | 0.02936 | 0.032883 | 0.081788 | 0.947152 | 0.943461 | 0.932975 | 0.930878 | 0.914017 | 0.914017 | 0 | 0.043545 | 0.212531 | 17,381 | 446 | 117 | 38.970852 | 0.827427 | 0.407399 | 0 | 0.284672 | 0 | 0 | 0.008269 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.29927 | false | 0.007299 | 0.072993 | 0 | 0.664234 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
721558021b8c29db99a6749e5f1c79c3d8063d02 | 95 | py | Python | modisco/seqlet_embedding/__init__.py | XiaotingChen/tfmodisco | 17cbafe806942304a02e8134fe10224bdff38b0c | [
"MIT"
] | null | null | null | modisco/seqlet_embedding/__init__.py | XiaotingChen/tfmodisco | 17cbafe806942304a02e8134fe10224bdff38b0c | [
"MIT"
] | null | null | null | modisco/seqlet_embedding/__init__.py | XiaotingChen/tfmodisco | 17cbafe806942304a02e8134fe10224bdff38b0c | [
"MIT"
] | null | null | null | from . import gapped_kmer
from . import advanced_gapped_kmer
from . import onehot_model_output
| 23.75 | 34 | 0.842105 | 14 | 95 | 5.357143 | 0.571429 | 0.4 | 0.373333 | 0.533333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.126316 | 95 | 3 | 35 | 31.666667 | 0.903614 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
9d518d5924f9690aaa69ee0cbb24ac07dd65fdae | 745 | py | Python | rastervision/command/__init__.py | carderne/raster-vision | 915fbcd3263d8f2193e65c2cd0eb53e050a47a01 | [
"Apache-2.0"
] | 4 | 2019-03-11T12:38:15.000Z | 2021-04-06T14:57:52.000Z | rastervision/command/__init__.py | carderne/raster-vision | 915fbcd3263d8f2193e65c2cd0eb53e050a47a01 | [
"Apache-2.0"
] | null | null | null | rastervision/command/__init__.py | carderne/raster-vision | 915fbcd3263d8f2193e65c2cd0eb53e050a47a01 | [
"Apache-2.0"
] | 1 | 2020-04-27T15:21:53.000Z | 2020-04-27T15:21:53.000Z | # flake8: noqa
from rastervision.command.command import *
from rastervision.command.command_config import *
from rastervision.command.analyze_command import *
from rastervision.command.analyze_command_config import *
from rastervision.command.chip_command import *
from rastervision.command.chip_command_config import *
from rastervision.command.train_command import *
from rastervision.command.train_command_config import *
from rastervision.command.predict_command import *
from rastervision.command.predict_command_config import *
from rastervision.command.eval_command import *
from rastervision.command.eval_command_config import *
from rastervision.command.bundle_command import *
from rastervision.command.bundle_command_config import *
| 43.823529 | 57 | 0.860403 | 91 | 745 | 6.835165 | 0.142857 | 0.360129 | 0.517685 | 0.606109 | 0.927653 | 0.869775 | 0 | 0 | 0 | 0 | 0 | 0.00146 | 0.080537 | 745 | 16 | 58 | 46.5625 | 0.906569 | 0.016107 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
19c7278483419fa55f0fd717246391b02538fc43 | 31,479 | py | Python | epm_client/apis/cluster_api.py | tub-elastest/epm-client-python | 4e708a7e8c80334337d2f05c0baec46fdd581b8f | [
"Apache-2.0"
] | 1 | 2019-05-23T12:51:18.000Z | 2019-05-23T12:51:18.000Z | epm_client/apis/cluster_api.py | tub-elastest/epm-client-python | 4e708a7e8c80334337d2f05c0baec46fdd581b8f | [
"Apache-2.0"
] | null | null | null | epm_client/apis/cluster_api.py | tub-elastest/epm-client-python | 4e708a7e8c80334337d2f05c0baec46fdd581b8f | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
EPM REST API
REST API description of the ElasTest Platform Manager Module.
OpenAPI spec version: 0.1.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ClusterApi(object):
    """Client for the EPM ``/cluster`` REST endpoints.

    NOTE: originally produced by the swagger code generator
    (https://github.com/swagger-api/swagger-codegen); refactored so the
    triplicated keyword-validation, required-parameter and header-negotiation
    boilerplate lives in private helpers shared by every endpoint.

    Every operation comes in two flavours: ``<op>(...)`` returns the response
    data only, while ``<op>_with_http_info(...)`` forwards the full result of
    the underlying ``ApiClient.call_api``.  Passing a ``callback`` keyword
    makes the request asynchronous: the callback is invoked with the response
    and the request thread is returned instead of the data.
    """

    def __init__(self, api_client=None):
        """Use *api_client* when given; otherwise fall back to (and lazily
        create) the shared client held by the global ``Configuration``."""
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client

    # ------------------------------------------------------------------
    # Internal helpers shared by all *_with_http_info implementations.
    # ------------------------------------------------------------------

    def _merge_kwargs(self, params, all_params, method_name):
        """Fold ``params['kwargs']`` into *params*, rejecting unknown keys.

        :param dict params: the ``locals()`` dict of the calling endpoint.
        :param list all_params: keyword names the endpoint accepts.
        :param str method_name: endpoint name used in the error message.
        :raises TypeError: if an unexpected keyword argument was supplied.
        """
        for key, val in params['kwargs'].items():
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )
            params[key] = val
        del params['kwargs']

    def _check_required(self, params, required, method_name):
        """Raise ``ValueError`` when any name in *required* is absent or None."""
        for name in required:
            if (name not in params) or (params[name] is None):
                raise ValueError(
                    "Missing the required parameter `%s` when calling `%s`"
                    % (name, method_name))

    def _headers(self, accepts, content_types):
        """Negotiate and return the Accept / Content-Type header dict."""
        header_params = {}
        accept = self.api_client.select_header_accept(accepts)
        if accept:
            header_params['Accept'] = accept
        header_params['Content-Type'] = \
            self.api_client.select_header_content_type(content_types)
        return header_params

    # ------------------------------------------------------------------
    # Endpoints.
    # ------------------------------------------------------------------

    def add_worker(self, id, machine_id, **kwargs):
        """Adds a worker to the cluster.

        :param str id: ID of Cluster (required)
        :param str machine_id: The ID of either a Worker or a VDU, which
            will be added to the cluster (required)
        :param callback: optional function for an asynchronous request;
            when set, the request thread is returned instead of the data
        :return: str
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.add_worker_with_http_info(id, machine_id, **kwargs)
        (data) = self.add_worker_with_http_info(id, machine_id, **kwargs)
        return data

    def add_worker_with_http_info(self, id, machine_id, **kwargs):
        """Adds a worker to the cluster; see :meth:`add_worker`."""
        params = locals()
        self._merge_kwargs(
            params,
            ['id', 'machine_id', 'callback', '_return_http_data_only'],
            'add_worker')
        self._check_required(params, ['id', 'machine_id'], 'add_worker')

        path_params = {'id': params['id'],
                       'machineId': params['machine_id']}
        header_params = self._headers(['application/json'], [])
        auth_settings = []

        return self.api_client.call_api(
            '/cluster/{id}/add/{machineId}'.replace('{format}', 'json'),
            'GET',
            path_params,
            {},
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type='str',
            auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'))

    def create_cluster(self, cluster_from_resource_group, **kwargs):
        """Creates a new cluster.

        Receives an identifier for a ResourceGroup and an array of types to
        set up the Resource Group as a cluster.

        :param ClusterFromResourceGroup cluster_from_resource_group: body to
            create a Cluster from a ResourceGroup (required)
        :param callback: optional function for an asynchronous request
        :return: Cluster
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.create_cluster_with_http_info(
                cluster_from_resource_group, **kwargs)
        (data) = self.create_cluster_with_http_info(
            cluster_from_resource_group, **kwargs)
        return data

    def create_cluster_with_http_info(self, cluster_from_resource_group,
                                      **kwargs):
        """Creates a new cluster; see :meth:`create_cluster`."""
        params = locals()
        self._merge_kwargs(
            params,
            ['cluster_from_resource_group', 'callback',
             '_return_http_data_only'],
            'create_cluster')
        self._check_required(
            params, ['cluster_from_resource_group'], 'create_cluster')

        header_params = self._headers(['application/json'],
                                      ['application/json'])
        auth_settings = []

        return self.api_client.call_api(
            '/cluster/create'.replace('{format}', 'json'),
            'POST',
            {},
            {},
            header_params,
            body=params['cluster_from_resource_group'],
            post_params=[],
            files={},
            response_type='Cluster',
            auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'))

    def delete_cluster(self, id, **kwargs):
        """Deletes the Cluster that matches with a given ID.

        :param str id: ID of Cluster (required)
        :param callback: optional function for an asynchronous request
        :return: str
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.delete_cluster_with_http_info(id, **kwargs)
        (data) = self.delete_cluster_with_http_info(id, **kwargs)
        return data

    def delete_cluster_with_http_info(self, id, **kwargs):
        """Deletes a Cluster; see :meth:`delete_cluster`."""
        params = locals()
        self._merge_kwargs(
            params,
            ['id', 'callback', '_return_http_data_only'],
            'delete_cluster')
        self._check_required(params, ['id'], 'delete_cluster')

        path_params = {'id': params['id']}
        header_params = self._headers(['*/*'], [])
        auth_settings = []

        return self.api_client.call_api(
            '/cluster/{id}'.replace('{format}', 'json'),
            'DELETE',
            path_params,
            {},
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type='str',
            auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'))

    def get_all_clusters(self, **kwargs):
        """Returns all clusters.

        :param callback: optional function for an asynchronous request
        :return: list[Cluster]
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_all_clusters_with_http_info(**kwargs)
        (data) = self.get_all_clusters_with_http_info(**kwargs)
        return data

    def get_all_clusters_with_http_info(self, **kwargs):
        """Returns all clusters; see :meth:`get_all_clusters`."""
        params = locals()
        self._merge_kwargs(
            params,
            ['callback', '_return_http_data_only'],
            'get_all_clusters')

        header_params = self._headers(['application/json'], [])
        auth_settings = []

        return self.api_client.call_api(
            '/cluster'.replace('{format}', 'json'),
            'GET',
            {},
            {},
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type='list[Cluster]',
            auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'))

    def register_cluster(self, body, **kwargs):
        """Registers a cluster with the information provided and saves it.

        :param Cluster body: cluster in a json (required)
        :param callback: optional function for an asynchronous request
        :return: Cluster
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.register_cluster_with_http_info(body, **kwargs)
        (data) = self.register_cluster_with_http_info(body, **kwargs)
        return data

    def register_cluster_with_http_info(self, body, **kwargs):
        """Registers a cluster; see :meth:`register_cluster`."""
        params = locals()
        self._merge_kwargs(
            params,
            ['body', 'callback', '_return_http_data_only',
             '_preload_content', '_request_timeout'],
            'register_cluster')
        self._check_required(params, ['body'], 'register_cluster')

        collection_formats = {}
        header_params = {
            'Content-Type': self.api_client.select_header_content_type(
                ['application/json'])}
        auth_settings = []

        # BUG FIX: header_params was previously omitted from this call (the
        # only call site not passing it), so the Content-Type negotiated
        # above never reached the request.
        return self.api_client.call_api(
            '/cluster', 'POST',
            {},
            [],
            header_params,
            body=params['body'],
            post_params=[],
            files={},
            response_type='Cluster',
            auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def remove_node(self, id, worker_id, **kwargs):
        """Removes a worker from the cluster.

        :param str id: ID of Cluster (required)
        :param str worker_id: The ID of a Worker (required)
        :param callback: optional function for an asynchronous request
        :return: Cluster
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.remove_node_with_http_info(id, worker_id, **kwargs)
        (data) = self.remove_node_with_http_info(id, worker_id, **kwargs)
        return data

    def remove_node_with_http_info(self, id, worker_id, **kwargs):
        """Removes a worker from the cluster; see :meth:`remove_node`."""
        params = locals()
        self._merge_kwargs(
            params,
            ['id', 'worker_id', 'callback', '_return_http_data_only'],
            'remove_node')
        self._check_required(params, ['id', 'worker_id'], 'remove_node')

        path_params = {'id': params['id'],
                       'workerId': params['worker_id']}
        header_params = self._headers(['application/json'], [])
        auth_settings = []

        return self.api_client.call_api(
            '/cluster/{id}/remove/{workerId}'.replace('{format}', 'json'),
            'DELETE',
            path_params,
            {},
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type='Cluster',
            auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'))

    def set_up_cluster(self, id, type, **kwargs):
        """Sets up the specified cluster to install the specified technology
        and connects it.

        :param str id: ID of Cluster (required)
        :param str type: type of technology (required)
        :param callback: optional function for an asynchronous request
        :return: str
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.set_up_cluster_with_http_info(id, type, **kwargs)
        (data) = self.set_up_cluster_with_http_info(id, type, **kwargs)
        return data

    def set_up_cluster_with_http_info(self, id, type, **kwargs):
        """Sets up the specified cluster; see :meth:`set_up_cluster`."""
        params = locals()
        self._merge_kwargs(
            params,
            ['id', 'type', 'callback', '_return_http_data_only'],
            'set_up_cluster')
        self._check_required(params, ['id', 'type'], 'set_up_cluster')

        path_params = {'id': params['id'],
                       'type': params['type']}
        header_params = self._headers(['application/json'], [])
        auth_settings = []

        return self.api_client.call_api(
            '/cluster/{id}/{type}'.replace('{format}', 'json'),
            'GET',
            path_params,
            {},
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type='str',
            auth_settings=auth_settings,
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'))
| 39.696091 | 122 | 0.563137 | 3,301 | 31,479 | 5.16601 | 0.072099 | 0.065678 | 0.022987 | 0.029555 | 0.896499 | 0.854161 | 0.84554 | 0.83651 | 0.813288 | 0.80672 | 0 | 0.000494 | 0.356587 | 31,479 | 792 | 123 | 39.746212 | 0.841463 | 0.348772 | 0 | 0.711172 | 1 | 0 | 0.149179 | 0.037159 | 0 | 0 | 0 | 0 | 0 | 1 | 0.040872 | false | 0 | 0.019074 | 0 | 0.119891 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
19db969984304dacf6f9eed7e0a756452ba56bc0 | 33,108 | py | Python | tccli/services/ocr/v20181119/help.py | zyh911/tencentcloud-cli | dfc5dbd660d4c60d265921c4edc630091478fc41 | [
"Apache-2.0"
] | null | null | null | tccli/services/ocr/v20181119/help.py | zyh911/tencentcloud-cli | dfc5dbd660d4c60d265921c4edc630091478fc41 | [
"Apache-2.0"
] | null | null | null | tccli/services/ocr/v20181119/help.py | zyh911/tencentcloud-cli | dfc5dbd660d4c60d265921c4edc630091478fc41 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Service descriptor consumed by tccli: the product name and its API version.
DESC = "ocr-2018-11-19"
INFO = {
"InsuranceBillOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持病案首页、费用清单、结算单、医疗发票四种保险理赔单据的文本识别和结构化输出。"
},
"GeneralBasicOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
},
{
"name": "Scene",
"desc": "保留字段。"
},
{
"name": "LanguageType",
"desc": "识别语言类型。\n支持自动识别语言类型,同时支持自选语言种类,默认中英文混合(zh)。\n可选值:\nzh\\auto\\jap\\kor\\\nspa\\fre\\ger\\por\\\nvie\\may\\rus\\ita\\\nhol\\swe\\fin\\dan\\\nnor\\hun\\tha\\lat\n可选值分别表示:\n中英文混合、自动识别、日语、韩语、\n西班牙语、法语、德语、葡萄牙语、\n越南语、马来语、俄语、意大利语、\n荷兰语、瑞典语、芬兰语、丹麦语、\n挪威语、匈牙利语、泰语、拉丁语系。"
}
],
"desc": "本接口支持多场景、任意版面下整图文字的识别。支持自动识别语言类型,同时支持自选语言种类(推荐),除中英文外,支持日语、韩语、西班牙语、法语、德语、葡萄牙语、越南语、马来语、俄语、意大利语、荷兰语、瑞典语、芬兰语、丹麦语、挪威语、匈牙利语、泰语等多种语言。应用场景包括:印刷文档识别、网络图片识别、广告图文字识别、街景店招识别、菜单识别、视频标题识别、头像文字识别等。"
},
"EnterpriseLicenseOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持智能化识别各类企业登记证书、许可证书、企业执照、三证合一类证书,结构化输出统一社会信用代码、公司名称、法定代表人、公司地址、注册资金、企业类型、经营范围等关键字段。"
},
"BusinessCardOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 7M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 7M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
},
{
"name": "Config",
"desc": "可选字段,根据需要选择是否请求对应字段。\n目前支持的字段为:\nRetImageType-“PROPROCESS” 图像预处理,string 类型。\n图像预处理功能为,检测图片倾斜的角度,将原本倾斜的图片围绕中心点转正,最终输出一张正的名片抠图。\n\nSDK 设置方式参考:\nConfig = Json.stringify({\"RetImageType\":\"PROPROCESS\"})\nAPI 3.0 Explorer 设置方式参考:\nConfig = {\"RetImageType\":\"PROPROCESS\"}"
}
],
"desc": "本接口支持名片各字段的自动定位与识别,包含姓名、电话、手机号、邮箱、公司、部门、职位、网址、地址、QQ、微信、MSN等。"
},
"IDCardOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。要求图片经Base64编码后不超过 7M,分辨率建议500*800以上,支持PNG、JPG、JPEG、BMP格式。建议卡片部分占据图片2/3以上。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。要求图片经Base64编码后不超过 7M,分辨率建议500*800以上,支持PNG、JPG、JPEG、BMP格式。建议卡片部分占据图片2/3以上。\n建议图片存储于腾讯云,可保障更高的下载速度和稳定性。"
},
{
"name": "CardSide",
"desc": "FRONT:身份证有照片的一面(人像面),\nBACK:身份证有国徽的一面(国徽面),\n该参数如果不填,将为您自动判断身份证正反面。"
},
{
"name": "Config",
"desc": "以下可选字段均为bool 类型,默认false:\nCropIdCard,身份证照片裁剪(去掉证件外多余的边缘、自动矫正拍摄角度)\nCropPortrait,人像照片裁剪(自动抠取身份证头像区域)\nCopyWarn,复印件告警\nBorderCheckWarn,边框和框内遮挡告警\nReshootWarn,翻拍告警\nDetectPsWarn,PS检测告警\nTempIdWarn,临时身份证告警\nInvalidDateWarn,身份证有效日期不合法告警\nQuality,图片质量分数(评价图片的模糊程度)\n\nSDK 设置方式参考:\nConfig = Json.stringify({\"CropIdCard\":true,\"CropPortrait\":true})\nAPI 3.0 Explorer 设置方式参考:\nConfig = {\"CropIdCard\":true,\"CropPortrait\":true}"
}
],
"desc": "本接口支持中国大陆居民二代身份证正反面所有字段的识别,包括姓名、性别、民族、出生日期、住址、公民身份证号、签发机关、有效期限,识别准确度达到99%以上。\n\n另外,本接口还支持多种增值能力,满足不同场景的需求。如身份证照片、人像照片的裁剪功能,同时具备9种告警功能,如下表所示。\n\n<table style=\"width:650px\">\n <thead>\n <tr>\n <th width=\"150\">增值能力</th>\n <th width=\"500\">能力项</th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <td rowspan=\"2\">裁剪功能</td>\n <td>身份证照片裁剪(去掉证件外多余的边缘、自动矫正拍摄角度)</td>\n </tr>\n <tr>\n <td>人像照片裁剪(自动抠取身份证头像区域)</td>\n </tr>\n <tr>\n <td rowspan=\"9\">告警功能</td>\n <td>身份证有效日期不合法告警</td>\n </tr>\n <tr>\n <td>身份证边框不完整告警</td>\n </tr>\n <tr>\n <td>身份证复印件告警</td>\n </tr>\n <tr>\n <td>身份证翻拍告警</td>\n </tr>\n <tr>\n <td>身份证框内遮挡告警</td>\n </tr>\n <tr>\n <td>临时身份证告警</td>\n </tr>\n <tr>\n <td>身份证 PS 告警</td>\n </tr>\n <tr>\n <td>图片模糊告警</td>\n </tr>\n </tbody>\n </table>"
},
"PassportOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。要求图片经Base64编码后不超过 7M,分辨率建议500*800以上,支持PNG、JPG、JPEG、BMP格式。建议卡片部分占据图片2/3以上。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。要求图片经Base64编码后不超过 7M,分辨率建议500*800以上,支持PNG、JPG、JPEG、BMP格式。建议卡片部分占据图片2/3以上。\n建议图片存储于腾讯云,可保障更高的下载速度和稳定性。"
},
{
"name": "Type",
"desc": "默认填写CN\n支持中国大陆地区护照。"
}
],
"desc": "本接口支持中国大陆地区护照个人资料页多个字段的检测与识别。已支持字段包括英文姓名、中文姓名、国家码、护照号、出生地、出生日期、国籍英文、性别英文、有效期、签发地点英文、签发日期、持证人签名、护照机读码(MRZ码)等。"
},
"MLIDCardOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。( 中国地区之外不支持这个字段 )\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
},
{
"name": "RetImage",
"desc": "是否返回图片"
}
],
"desc": "本接口支持马来西亚身份证识别,识别字段包括身份证号、姓名、性别、地址;具备身份证人像照片的裁剪功能和翻拍、复印件告警功能。\n本接口暂未完全对外开放,如需咨询,请[联系商务](https://cloud.tencent.com/about/connect)\n"
},
"QrcodeOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持条形码和二维码的识别(包括 DataMatrix 和 PDF417)。\n本接口暂未完全对外开放,如需咨询,请[联系商务](https://cloud.tencent.com/about/connect) "
},
"GeneralAccurateOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持图像整体文字的检测和识别,返回文字框位置与文字内容。相比通用印刷体识别接口,高精度版在英文、数字、小字、模糊字、倾斜文本行等困难场景下,准确率和召回率更高。"
},
"MixedInvoiceDetect": {
"params": [
{
"name": "ReturnImage",
"desc": "是否需要返回裁剪后的图片。"
},
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持多张、多类型票据的混合检测和自动分类,返回对应票据类型。目前已支持增值税发票、增值税发票(卷票)、定额发票、通用机打发票、购车发票、火车票、出租车发票、机票行程单、汽车票、轮船票、过路过桥费发票、酒店账单、客运限额发票、购物小票、完税证明共15种票据。"
},
"VinOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持图片内车辆识别代号(VIN)的检测和识别。"
},
"MLIDPassportOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。要求图片经Base64编码后不超过 7M,分辨率建议500*800以上,支持PNG、JPG、JPEG、BMP格式。建议卡片部分占据图片2/3以上。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "RetImage",
"desc": "是否返回图片"
}
],
"desc": "本接口支持中国港澳台地区以及其他国家、地区的护照。识别字段包括护照ID、姓名、出生日期、性别、有效期、发行国、国籍;具备护照人像照片的裁剪功能和翻拍、复印件告警功能。\n本接口暂未完全对外开放,如需咨询,请[联系商务](https://cloud.tencent.com/about/connect)"
},
"VatRollInvoiceOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持对增值税发票(卷票)的发票代码、发票号码、日期、校验码、合计金额(小写)等关键字段的识别。"
},
"QuotaInvoiceOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持定额发票的发票号码、发票代码、金额(大小写)、发票消费类型、地区及是否有公司印章等关键字段的识别。"
},
"GeneralFastOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持图片中整体文字的检测和识别,返回文字框位置与文字内容。相比通用印刷体识别接口,识别速度更快、支持的 QPS 更高。"
},
"PropOwnerCertOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持房产证关键字段的识别,包括房地产权利人、共有情况、登记时间、规划用途、房屋性质、房屋坐落等。"
},
"BizLicenseOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 7M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 7M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持快速精准识别营业执照上的字段,包括注册号、公司名称、经营场所、主体类型、法定代表人、注册资金、组成形式、成立日期、营业期限和经营范围等字段。"
},
"GeneralHandwritingOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
},
{
"name": "Scene",
"desc": "场景字段,默认不用填写。\n可选值:only_hw 表示只输出手写体识别结果,过滤印刷体。"
}
],
"desc": "本接口支持图片内手写体文字的检测和识别,针对手写字体无规则、字迹潦草、模糊等特点进行了识别能力的增强。"
},
"InvoiceGeneralOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持对通用机打发票的发票代码、发票号码、日期、购买方识别号、销售方识别号、校验码、小写金额等关键字段的识别。"
},
"VatInvoiceOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持增值税专用发票、增值税普通发票、增值税电子发票全字段的内容检测和识别,包括发票代码、发票号码、开票日期、合计金额、校验码、税率、合计税额、价税合计、购买方识别号、复核、销售方识别号、开票人、密码区1、密码区2、密码区3、密码区4、发票名称、购买方名称、销售方名称、服务名称、备注、规格型号、数量、单价、金额、税额、收款人等字段。"
},
"WaybillOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持市面上主流版式电子运单的识别,包括收件人和寄件人的姓名、电话、地址以及运单号等字段。"
},
"FlightInvoiceOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持机票行程单关键字段的识别,包括姓名、身份证件号码、航班号、票价 、合计、电子客票号码、填开日期等。"
},
"PermitOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持对卡式港澳台通行证的识别,包括签发地点、签发机关、有效期限、性别、出生日期、英文姓名、姓名、证件号等字段。"
},
"OrgCodeCertOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持组织机构代码证关键字段的识别,包括代码、有效期、地址、机构名称等。"
},
"FinanBillSliceOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持常见银行票据的自动分类和识别。切片识别包括金融行业常见票据的重要切片字段识别,包括金额、账号、日期、凭证号码等。(金融票据切片:金融票据中待识别字段及其周围局部区域的裁剪图像。)"
},
"BusInvoiceOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持识别公路汽车客票的发票代码、发票号码、日期、姓名、票价等字段。"
},
"TableOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持图片内表格文档的检测和识别,返回每个单元格的文字内容,支持将识别结果保存为 Excel 格式。"
},
"HmtResidentPermitOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
},
{
"name": "CardSide",
"desc": "FRONT:有照片的一面(人像面),\nBACK:无照片的一面(国徽面),\n该参数如果不填或填错,将为您自动判断正反面。"
}
],
"desc": "港澳台居住证OCR支持港澳台居住证正反面全字段内容检测识别功能,包括姓名、性别、出生日期、地址、身份证ID、签发机关、有效期限、签发次数、通行证号码关键字段识别。可以应用于港澳台居住证信息有效性校验场景,例如银行开户、用户注册等场景。"
},
"ArithmeticOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持作业算式题目的自动识别,目前覆盖 K12 学力范围内的 14 种题型,包括加减乘除四则运算、分数四则运算、竖式四则运算、脱式计算等。"
},
"TollInvoiceOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持对过路过桥费发票的发票代码、发票号码、日期、小写金额等关键字段的识别。"
},
"EstateCertOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持不动产权证关键字段的识别,包括使用期限、面积、用途、权利性质、权利类型、坐落、共有情况、权利人、权利其他状况等。\n\n\n"
},
"FinanBillOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持常见银行票据的自动分类和识别。整单识别包括支票(含现金支票、普通支票、转账支票),承兑汇票(含银行承兑汇票、商业承兑汇票)以及进账单等,适用于中国人民银行印发的 2010 版银行票据凭证版式(银发[2010]299 号)。"
},
"TrainTicketOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持火车票全字段的识别,包括编号、票价、姓名、座位号、出发时间、出发站、到达站、车次、席别、发票类型及序列号等。\n"
},
"TextDetect": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口通过检测图片中的文字信息特征,快速判断图片中有无文字并返回判断结果,帮助用户过滤无文字的图片。"
},
"GeneralEfficientOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持多场景、任意版面下整图文字的识别。相较于“通用印刷体识别”接口,精简版接口在准召率有一定损失的情况下,耗时更短。适用于对接口耗时较为敏感的客户。"
},
"TaxiInvoiceOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持出租车发票关键字段的识别,包括发票号码、发票代码、金额、日期、上下车时间、里程、车牌号、发票类型及所属地区等字段。"
},
"ResidenceBookletOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持居民户口簿户主页及成员页关键字段的识别,包括姓名、户别、地址、籍贯、身份证号码等。"
},
"InstitutionOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持事业单位法人证书关键字段识别,包括注册号、有效期、住所、名称、法定代表人等。"
},
"EnglishOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持图像英文文字的检测和识别,返回文字框位置与文字内容。支持多场景、任意版面下的英文、字母、数字和常见字符的识别,同时覆盖英文印刷体和英文手写体识别。"
},
"VehicleRegCertOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持国内机动车登记证书主要字段的结构化识别,包括机动车所有人、身份证明名称、号码、车辆型号、车辆识别代号、发动机号、制造厂名称等。"
},
"BankCardOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 7M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 7M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持对中国大陆主流银行卡的卡号、银行信息、有效期等关键字段的检测与识别。"
},
"CarInvoiceOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持机动车销售统一发票和二手车销售统一发票的识别,包括发票号码、发票代码、合计金额、合计税额等二十多个字段。"
},
"DutyPaidProofOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持对完税证明的税号、纳税人识别号、纳税人名称、金额合计大写、金额合计小写、填发日期、税务机关、填票人等关键字段的识别。"
},
"MainlandPermitOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
},
{
"name": "RetProfile",
"desc": "是非返回头像。默认不返回。"
}
],
"desc": "智能识别并结构化港澳台居民来往内地通行证正面全部字段,包含中文姓名、英文姓名、性别、出生日期、签发机关、有效期限、证件号、签发地点、签发次数、证件类别。"
},
"FormulaOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持识别主流初高中数学符号和公式,返回公式的 Latex 格式文本。"
},
"LicensePlateOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持对中国大陆机动车车牌的自动定位和识别,返回地域编号和车牌号信息。"
},
"ShipInvoiceOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
}
],
"desc": "本接口支持识别轮船票的发票代码、发票号码、日期、姓名、票价等字段。"
},
"MixedInvoiceOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
},
{
"name": "Types",
"desc": "需要识别的票据类型列表,为空或不填表示识别全部类型。\n0:出租车发票\n1:定额发票\n2:火车票\n3:增值税发票\n5:机票行程单\n8:通用机打发票\n9:汽车票\n10:轮船票\n11:增值税发票(卷票 )\n12:购车发票\n13:过路过桥费发票"
}
],
"desc": "本接口支持多张、多类型票据的混合识别,系统自动实现分割、分类和识别,同时支持自选需要识别的票据类型。目前已支持增值税发票、增值税发票(卷票)、定额发票、通用机打发票、购车发票、火车票、出租车发票、机票行程单、汽车票、轮船票、过路过桥费发票共11种票据。"
},
"DriverLicenseOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。要求图片经Base64编码后不超过 7M,分辨率建议500*800以上,支持PNG、JPG、JPEG、BMP格式。建议卡片部分占据图片2/3以上。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。要求图片经Base64编码后不超过 7M,分辨率建议500*800以上,支持PNG、JPG、JPEG、BMP格式。建议卡片部分占据图片2/3以上。图片下载时间不超过 3 秒。\n建议图片存储于腾讯云,可保障更高的下载速度和稳定性。"
},
{
"name": "CardSide",
"desc": "FRONT 为驾驶证主页正面(有红色印章的一面),\nBACK 为驾驶证副页正面(有档案编号的一面)。"
}
],
"desc": "本接口支持驾驶证主页和副页所有字段的自动定位与识别,重点字段的识别准确度达到99%以上。\n\n驾驶证主页:包括证号、姓名、性别、国籍、住址、出生日期、初次领证日期、准驾车型、有效期限。\n\n驾驶证副页:包括证号、姓名、档案编号、记录。\n\n另外,本接口还支持复印件、翻拍和PS告警功能。"
},
"EduPaperOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经Base64编码后不超过 3M。图片下载时间不超过 3 秒。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。\n支持的图片格式:PNG、JPG、JPEG,暂不支持 GIF 格式。\n支持的图片大小:所下载图片经 Base64 编码后不超过 3M。图片下载时间不超过 3 秒。\n图片存储于腾讯云的 Url 可保障更高的下载速度和稳定性,建议图片存储于腾讯云。\n非腾讯云存储的 Url 速度和稳定性可能受一定影响。"
},
{
"name": "Config",
"desc": "扩展配置信息。\n配置格式:{\"option1\":value1,\"option2\":value2}\n可配置信息:\n 参数名称 是否必选 类型 可选值 默认值 描述\n task_type 否 Int32 [0,1] 1 用于选择任务类型: 0: 关闭版式分析与处理 1: 开启版式分析处理\n is_structuralization 否 Bool false\\true true 用于选择是否结构化输出:false:返回包体返回通用输出 true:返回包体同时返回通用和结构化输出\n if_readable_format 否 Bool false\\true false 是否按照版式整合通用文本/公式输出结果\n例子:\n{\"task_type\": 1,\"is_structuralization\": true,\"if_readable_format\": true}"
}
],
"desc": "本接口支持数学试题内容的识别和结构化输出,包括通用文本解析和小学/初中/高中数学公式解析能力(包括91种题型,180种符号)。"
},
"VehicleLicenseOCR": {
"params": [
{
"name": "ImageBase64",
"desc": "图片的 Base64 值。要求图片经Base64编码后不超过 7M,分辨率建议500*800以上,支持PNG、JPG、JPEG、BMP格式。建议卡片部分占据图片2/3以上。\n图片的 ImageUrl、ImageBase64 必须提供一个,如果都提供,只使用 ImageUrl。"
},
{
"name": "ImageUrl",
"desc": "图片的 Url 地址。要求图片经Base64编码后不超过 7M,分辨率建议500*800以上,支持PNG、JPG、JPEG、BMP格式。建议卡片部分占据图片2/3以上。图片下载时间不超过 3 秒。\n建议图片存储于腾讯云,可保障更高的下载速度和稳定性。"
},
{
"name": "CardSide",
"desc": "FRONT 为行驶证主页正面(有红色印章的一面),\nBACK 为行驶证副页正面(有号码号牌的一面)。"
}
],
"desc": "本接口支持行驶证主页和副页所有字段的自动定位与识别。\n\n行驶证主页:车牌号码、车辆类型、所有人、住址、使用性质、品牌型号、识别代码、发动机号、注册日期、发证日期、发证单位。\n\n行驶证副页:号牌号码、档案编号、核定载人数、总质量、整备质量、核定载质量、外廓尺寸、准牵引总质量、备注、检验记录。\n\n另外,本接口还支持复印件、翻拍和PS告警功能。"
}
} | 45.540578 | 1,066 | 0.639422 | 3,794 | 33,108 | 5.577491 | 0.183448 | 0.033411 | 0.048863 | 0.078257 | 0.740939 | 0.737914 | 0.730495 | 0.725816 | 0.725816 | 0.725816 | 0 | 0.032665 | 0.204784 | 33,108 | 727 | 1,067 | 45.540578 | 0.77108 | 0.000634 | 0 | 0.433884 | 0 | 0.174931 | 0.761259 | 0.334583 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.002755 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c208729f2ca7f74e699374143c2848ad95a7486e | 59 | py | Python | OMASS4/Read_Noise_Calculation/__init__.py | DBernardes/OMASS4 | 30d2edc961463253cc120bc8ca1d74a0a73d922d | [
"MIT"
] | null | null | null | OMASS4/Read_Noise_Calculation/__init__.py | DBernardes/OMASS4 | 30d2edc961463253cc120bc8ca1d74a0a73d922d | [
"MIT"
] | null | null | null | OMASS4/Read_Noise_Calculation/__init__.py | DBernardes/OMASS4 | 30d2edc961463253cc120bc8ca1d74a0a73d922d | [
"MIT"
] | null | null | null | from .read_noise_calculation import Read_Noise_Calculation
| 29.5 | 58 | 0.915254 | 8 | 59 | 6.25 | 0.625 | 0.36 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.067797 | 59 | 1 | 59 | 59 | 0.909091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
c222dc115fb853728ce1133fcd8cdca7b41c7e12 | 23,361 | py | Python | pytest/testPyTX.py | gurnec/BitcoinArmory | 35320e791ccd3f9faa837b49b9d3e9ddc578e2ae | [
"MIT"
] | 1 | 2021-01-02T14:40:48.000Z | 2021-01-02T14:40:48.000Z | pytest/testPyTX.py | gurnec/BitcoinArmory | 35320e791ccd3f9faa837b49b9d3e9ddc578e2ae | [
"MIT"
] | null | null | null | pytest/testPyTX.py | gurnec/BitcoinArmory | 35320e791ccd3f9faa837b49b9d3e9ddc578e2ae | [
"MIT"
] | 1 | 2021-01-02T14:38:06.000Z | 2021-01-02T14:38:06.000Z | '''
Created on Aug 4, 2013
@author: Andy
'''
import sys
sys.path.append('..')
import unittest
from armoryengine.ArmoryUtils import hex_to_binary, binary_to_hex, hex_to_int, \
ONE_BTC
from armoryengine.BinaryUnpacker import BinaryUnpacker
from armoryengine.Block import PyBlock
from armoryengine.PyBtcAddress import PyBtcAddress
from armoryengine.Script import PyScriptProcessor
from armoryengine.Transaction import PyTx, PyTxIn, PyOutPoint, PyTxOut, \
PyCreateAndSignTx, getMultisigScriptInfo, BlockComponent,\
PyCreateAndSignTx_old
from pytest.Tiab import TiabTest
# Unserialize and reserialize reference transactions
# tx1raw: single-input transaction whose DER signature padding is already
# minimal — testMinimizeDERSignaturePadding expects minimization to be a no-op.
tx1raw = hex_to_binary( \
'01000000016290dce984203b6a5032e543e9e272d8bce934c7de4d15fa0fe44d'
'd49ae4ece9010000008b48304502204f2fa458d439f957308bca264689aa175e'
'3b7c5f78a901cb450ebd20936b2c500221008ea3883a5b80128e55c9c6070aa6'
'264e1e0ce3d18b7cd7e85108ce3d18b7419a0141044202550a5a6d3bb81549c4'
'a7803b1ad59cdbba4770439a4923624a8acfc7d34900beb54a24188f7f0a4068'
'9d905d4847cc7d6c8d808a457d833c2d44ef83f76bffffffff0242582c0a0000'
'00001976a914c1b4695d53b6ee57a28647ce63e45665df6762c288ac80d1f008'
'000000001976a9140e0aec36fe2545fb31a41164fb6954adcd96b34288ac00000000')

# tx2raw: single-input transaction whose signature carries two extra DER
# padding bytes (one each on r and s) — testMinimizeDERSignaturePadding
# expects its script to shrink by exactly 2 bytes.
tx2raw = hex_to_binary( \
'0100000001f658dbc28e703d86ee17c9a2d3b167a8508b082fa0745f55be5144'
'a4369873aa010000008c49304602210041e1186ca9a41fdfe1569d5d807ca7ff'
'6c5ffd19d2ad1be42f7f2a20cdc8f1cc0221003366b5d64fe81e53910e156914'
'091d12646bc0d1d662b7a65ead3ebe4ab8f6c40141048d103d81ac9691cf13f3'
'fc94e44968ef67b27f58b27372c13108552d24a6ee04785838f34624b294afee'
'83749b64478bb8480c20b242c376e77eea2b3dc48b4bffffffff0200e1f50500'
'0000001976a9141b00a2f6899335366f04b277e19d777559c35bc888ac40aeeb'
'02000000001976a9140e0aec36fe2545fb31a41164fb6954adcd96b34288ac00000000')

# multiTx1raw: four-input transaction whose DER signature padding is already
# minimal (all four input scripts survive minimization unchanged).
multiTx1raw = hex_to_binary( \
'0100000004a14fd232f045f0c9f28c6848a22fee393152e901eaa61a9f18438b3ba05c6035010000008a47304402201b19808aa145dbebf775ed11a15d763eaa2'
'b5df92b20f9835f62c72404918b1b02205aea3e816ac6ac7545254b9c34a00c37f20024793bbe0a64958934343f3c577b014104c0f3d0a4920bb6825769dd6ae1'
'e36b0ac36581639d605241cdd548c4ef5d46cda5ac21723d478041a63118f192fdb730c4cf76106789824cd68879a7afeb5288ffffffffa14fd232f045f0c9f28'
'c6848a22fee393152e901eaa61a9f18438b3ba05c6035000000008b4830450220796307d9787b892c8b1ada8511d99e855ea3099e1a76ce0f7aa783ed352a6e59'
'022100fc38d05d7dfbe51e28c36d854dd0dcc938d60a3e406573c3dc39253694d14a12014104630aaf9d5c8d757cb5759428d4075911a2b2ff13dd7208ad7ea1d'
'1682738a7138be93ee526c9d774e0dea03fa2a5fbb68043259ddfb942c0763f9b636b40c43fffffffffa14fd232f045f0c9f28c6848a22fee393152e901eaa61a'
'9f18438b3ba05c6035020000008c493046022100cb423b63197ef3cdbfaed69f61aac59755f0025bd6d7a9d3c78024d897ebcf94022100f3ad14804a3c8042387'
'eca9b9053abe99e12651a795cae7f546b08e1c08c6464014104649694df12dcd7fdb5a8c54c376b904bd7337891d865b8d306beb5d2e5d8fdf2a537d6f9df65ff'
'44eb0b6042ebfdf9e338bff7f4afacb359dd6c71aea7b9b92dffffffffa14fd232f045f0c9f28c6848a22fee393152e901eaa61a9f18438b3ba05c60350300000'
'08b483045022100fb9f4ddc68497a266362d489abf05184909a2b99aa64803061c88597b725877802207f39cf5a90a305aee45f365cf9e2d258e37cab4da6c123'
'aa287635cd1fd40dd001410438252055130f3dd242201684931550c4065efc1b87c48192f75868f747e2a9df9a700fed7e90068bd395c58680bd593780c8119e7'
'981dae08c345588f120fcb4ffffffff02e069f902000000001976a914ad00cf2b893e132c33a79a22ae938d6309c780a488ac80f0fa02000000001976a9143155'
'18b646ea65ad148ee1e2f0360233617447e288ac00000000')

# multiTx2raw: same four-input transaction, except the fourth input's
# signature carries one extra DER padding byte —
# testMinimizeDERSignaturePadding expects that script to shrink by 1 byte.
multiTx2raw = hex_to_binary( \
'0100000004a14fd232f045f0c9f28c6848a22fee393152e901eaa61a9f18438b3ba05c6035010000008a47304402201b19808aa145dbebf775ed11a15d763eaa2'
'b5df92b20f9835f62c72404918b1b02205aea3e816ac6ac7545254b9c34a00c37f20024793bbe0a64958934343f3c577b014104c0f3d0a4920bb6825769dd6ae1'
'e36b0ac36581639d605241cdd548c4ef5d46cda5ac21723d478041a63118f192fdb730c4cf76106789824cd68879a7afeb5288ffffffffa14fd232f045f0c9f28'
'c6848a22fee393152e901eaa61a9f18438b3ba05c6035000000008b4830450220796307d9787b892c8b1ada8511d99e855ea3099e1a76ce0f7aa783ed352a6e59'
'022100fc38d05d7dfbe51e28c36d854dd0dcc938d60a3e406573c3dc39253694d14a12014104630aaf9d5c8d757cb5759428d4075911a2b2ff13dd7208ad7ea1d'
'1682738a7138be93ee526c9d774e0dea03fa2a5fbb68043259ddfb942c0763f9b636b40c43fffffffffa14fd232f045f0c9f28c6848a22fee393152e901eaa61a'
'9f18438b3ba05c6035020000008c493046022100cb423b63197ef3cdbfaed69f61aac59755f0025bd6d7a9d3c78024d897ebcf94022100f3ad14804a3c8042387'
'eca9b9053abe99e12651a795cae7f546b08e1c08c6464014104649694df12dcd7fdb5a8c54c376b904bd7337891d865b8d306beb5d2e5d8fdf2a537d6f9df65ff'
'44eb0b6042ebfdf9e338bff7f4afacb359dd6c71aea7b9b92dffffffffa14fd232f045f0c9f28c6848a22fee393152e901eaa61a9f18438b3ba05c60350300000'
'08c49304602220000fb9f4ddc68497a266362d489abf05184909a2b99aa64803061c88597b725877802207f39cf5a90a305aee45f365cf9e2d258e37cab4da6c123'
'aa287635cd1fd40dd001410438252055130f3dd242201684931550c4065efc1b87c48192f75868f747e2a9df9a700fed7e90068bd395c58680bd593780c8119e7'
'981dae08c345588f120fcb4ffffffff02e069f902000000001976a914ad00cf2b893e132c33a79a22ae938d6309c780a488ac80f0fa02000000001976a9143155'
'18b646ea65ad148ee1e2f0360233617447e288ac00000000')
# A full serialized block; the tests parse it, re-serialize it, and recompute
# its Merkle root.
hexBlock = ( \
'01000000eb10c9a996a2340a4d74eaab41421ed8664aa49d18538bab59010000000000005a2f06efa9f2bd804f17877537f2080030cadbfa1eb50e02338117cc'
'604d91b9b7541a4ecfbb0a1a64f1ade70301000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0804cfbb0a1a'
'02360affffffff0100f2052a01000000434104c2239c4eedb3beb26785753463be3ec62b82f6acd62efb65f452f8806f2ede0b338e31d1f69b1ce449558d7061'
'aa1648ddc2bf680834d3986624006a272dc21cac000000000100000003e8caa12bcb2e7e86499c9de49c45c5a1c6167ea4b894c8c83aebba1b6100f343010000'
'008c493046022100e2f5af5329d1244807f8347a2c8d9acc55a21a5db769e9274e7e7ba0bb605b26022100c34ca3350df5089f3415d8af82364d7f567a6a297f'
'cc2c1d2034865633238b8c014104129e422ac490ddfcb7b1c405ab9fb42441246c4bca578de4f27b230de08408c64cad03af71ee8a3140b40408a7058a1984a9'
'f246492386113764c1ac132990d1ffffffff5b55c18864e16c08ef9989d31c7a343e34c27c30cd7caa759651b0e08cae0106000000008c4930460221009ec9aa'
'3e0caf7caa321723dea561e232603e00686d4bfadf46c5c7352b07eb00022100a4f18d937d1e2354b2e69e02b18d11620a6a9332d563e9e2bbcb01cee559680a'
'014104411b35dd963028300e36e82ee8cf1b0c8d5bf1fc4273e970469f5cb931ee07759a2de5fef638961726d04bd5eb4e5072330b9b371e479733c942964bb8'
'6e2b22ffffffff3de0c1e913e6271769d8c0172cea2f00d6d3240afc3a20f9fa247ce58af30d2a010000008c493046022100b610e169fd15ac9f60fe2b507529'
'281cf2267673f4690ba428cbb2ba3c3811fd022100ffbe9e3d71b21977a8e97fde4c3ba47b896d08bc09ecb9d086bb59175b5b9f03014104ff07a1833fd8098b'
'25f48c66dcf8fde34cbdbcc0f5f21a8c2005b160406cbf34cc432842c6b37b2590d16b165b36a3efc9908d65fb0e605314c9b278f40f3e1affffffff0240420f'
'00000000001976a914adfa66f57ded1b655eb4ccd96ee07ca62bc1ddfd88ac007d6a7d040000001976a914981a0c9ae61fa8f8c96ae6f8e383d6e07e77133e88'
'ac00000000010000000138e7586e0784280df58bd3dc5e3d350c9036b1ec4107951378f45881799c92a4000000008a47304402207c945ae0bbdaf9dadba07bdf'
'23faa676485a53817af975ddf85a104f764fb93b02201ac6af32ddf597e610b4002e41f2de46664587a379a0161323a85389b4f82dda014104ec8883d3e4f7a3'
'9d75c9f5bb9fd581dc9fb1b7cdf7d6b5a665e4db1fdb09281a74ab138a2dba25248b5be38bf80249601ae688c90c6e0ac8811cdb740fcec31dffffffff022f66'
'ac61050000001976a914964642290c194e3bfab661c1085e47d67786d2d388ac2f77e200000000001976a9141486a7046affd935919a3cb4b50a8a0c233c286c'
'88ac00000000')

# These two transactions were created on a fabricated (test) blockchain,
# but they should still parse and behave like real ones.
tx1Fake = PyTx().unserialize(hex_to_binary( (
'01000000 0163451d 1002611c 1388d5ba 4ddfdf99 196a86b5 990fb5b0 dc786207'
'4fdcb8ee d2000000 004a4930 46022100 cb02fb5a 910e7554 85e3578e 6e9be315'
'a161540a 73f84ee6 f5d68641 925c59ac 0221007e 530a1826 30b50e2c 12dd09cd'
'ebfd809f 038be982 bdc2c7e9 d4cbf634 9e088d01 ffffffff 0200ca9a 3b000000'
'001976a9 14cb2abd e8bccacc 32e893df 3a054b9e f7f227a4 ce88ac00 286bee00'
'00000019 76a914ee 26c56fc1 d942be8d 7a24b2a1 001dd894 69398088 ac000000'
'00' ).replace(' ','')))

tx2Fake = PyTx().unserialize(hex_to_binary( (
'01000000 01a5b837 da38b64a 6297862c ba8210d0 21ac59e1 2b7c6d7e 70c355f6'
'972ee7a8 6e010000 008c4930 46022100 89e47100 d88d5f8c 8f62a796 dac3afb8'
'f090c6fc 2eb0c4af ac7b7567 3a364c01 0221002b f40e554d ae51264b 0a86df17'
'3e45756a 89bbd302 4f166cc4 2cfd1874 13636901 41046868 0737c76d abb801cb'
'2204f57d be4e4579 e4f710cd 67dc1b42 27592c81 e9b5cf02 b5ac9e8b 4c9f49be'
'5251056b 6a6d011e 4c37f6b6 d17ede6b 55faa235 19e2ffff ffff0100 286bee00'
'00000019 76a914c5 22664fb0 e55cdc5c 0cea73b4 aad97ec8 34323288 ac000000'
'00' ).replace(' ','')))

# 36 zero bytes: an all-zero outpoint (32-byte tx hash + 4-byte output index).
ALL_ZERO_OUTPOINT = hex_to_binary('00' * 36)
class PyTXTest(TiabTest):
    """Exercises PyTx / PyTxIn / PyTxOut serialization round-trips,
    DER-signature padding minimization, and PyScriptProcessor verification
    against raw transactions defined at module level (including two from a
    throwaway test chain)."""

    def testMinimizeDERSignaturePadding(self):
        # multiTx1's signatures already use minimal DER padding: every input
        # script must come back unchanged and the "minimized" flag is False.
        multiTx1 = PyTx().unserialize(multiTx1raw)
        paddingMinimizedMulti1, newTxMulti1 = multiTx1.minimizeDERSignaturePadding()
        self.assertEqual(multiTx1.inputs[0].binScript, newTxMulti1.inputs[0].binScript)
        self.assertEqual(multiTx1.inputs[1].binScript, newTxMulti1.inputs[1].binScript)
        self.assertEqual(multiTx1.inputs[2].binScript, newTxMulti1.inputs[2].binScript)
        self.assertEqual(multiTx1.inputs[3].binScript, newTxMulti1.inputs[3].binScript)
        self.assertFalse(paddingMinimizedMulti1)
        # toString() is only smoke-tested here (non-empty output)
        txString = multiTx1.toString()
        self.assertTrue(len(txString)> 0)
        multiTx2 = PyTx().unserialize(multiTx2raw)
        paddingMinimizedMulti2, newTxMulti2 = multiTx2.minimizeDERSignaturePadding()
        self.assertEqual(multiTx2.inputs[0].binScript, newTxMulti2.inputs[0].binScript)
        self.assertEqual(multiTx2.inputs[1].binScript, newTxMulti2.inputs[1].binScript)
        self.assertEqual(multiTx2.inputs[2].binScript, newTxMulti2.inputs[2].binScript)
        # Added 1 extra byte of padding
        self.assertEqual(len(multiTx2.inputs[3].binScript)-1, len(newTxMulti2.inputs[3].binScript))
        self.assertTrue(paddingMinimizedMulti2)
        tx1 = PyTx().unserialize(tx1raw)
        paddingMinimized1, newTx1 = tx1.minimizeDERSignaturePadding()
        self.assertEqual(tx1.inputs[0].binScript, newTx1.inputs[0].binScript)
        self.assertFalse(paddingMinimized1)
        tx2 = PyTx().unserialize(tx2raw)
        paddingMinimized2, newTx2 = tx2.minimizeDERSignaturePadding()
        # Old tx had 2 extra bytes of padding one each on the r and s
        self.assertEqual(len(tx2.inputs[0].binScript)-2, len(newTx2.inputs[0].binScript))
        self.assertTrue(paddingMinimized2)

    def testSerializeUnserialize(self):
        # Round-trip: unserialize -> serialize must reproduce the raw bytes,
        # both from a plain binary string and from a BinaryUnpacker.
        tx1 = PyTx().unserialize(tx1raw)
        tx2 = PyTx().unserialize(BinaryUnpacker(tx2raw))
        tx1again = tx1.serialize()
        tx2again = tx2.serialize()
        self.assertEqual(tx1again, tx1raw)
        self.assertEqual(tx2again, tx2raw)
        # Same round-trip guarantee for a whole block
        blk = PyBlock().unserialize( hex_to_binary(hexBlock) )
        blockReHex = binary_to_hex(blk.serialize())
        self.assertEqual(hexBlock, blockReHex)
        # NOTE(review): binRoot is computed but never asserted against; the
        # assertion below only compares the two stored merkle roots.
        binRoot = blk.blockData.getMerkleRoot()
        self.assertEqual(blk.blockHeader.merkleRoot, blk.blockData.merkleRoot)

    def testCreateTx(self):
        # Build a 1-in/1-out tx by hand, then sign a spend of it (A -> B)
        # and verify the spend with the script processor.
        addrA = PyBtcAddress().createFromPrivateKey(hex_to_int('aa' * 32))
        addrB = PyBtcAddress().createFromPrivateKey(hex_to_int('bb' * 32))
        # This TxIn will be completely ignored, so it can contain garbage
        txinA = PyTxIn()
        txinA.outpoint = PyOutPoint().unserialize(hex_to_binary('00'*36))
        txinA.binScript = hex_to_binary('99'*4)
        txinA.intSeq = hex_to_int('ff'*4)
        # test binary unpacker in unserialize
        testTxIn = PyTxIn().unserialize(txinA.serialize())
        self.assertEqual(txinA.getScript(), testTxIn.getScript())
        self.assertEqual(txinA.intSeq, testTxIn.intSeq)
        self.assertEqual(txinA.outpoint.txHash, testTxIn.outpoint.txHash)
        txoutA = PyTxOut()
        txoutA.value = 50 * ONE_BTC
        # Standard P2PKH script: OP_DUP OP_HASH160 <hash160> OP_EQUALVERIFY OP_CHECKSIG
        txoutA.binScript = '\x76\xa9\x14' + addrA.getAddr160() + '\x88\xac'
        # Test pprint
        print '\nTest pretty print PyTxIn, expect PrevTXHash all 0s'
        testTxIn.pprint()
        # test binary unpacker in unserialize
        testTxOut = PyTxOut().unserialize(txoutA.serialize())
        self.assertEqual(txoutA.getScript(), testTxOut.getScript())
        self.assertEqual(txoutA.value, testTxOut.getValue())
        # Test pprint
        print '\nTest pretty print PyTxOut'
        testTxOut.pprint()
        tx1 = PyTx()
        tx1.version = 1
        tx1.numInputs = 1
        tx1.inputs = [txinA]
        tx1.numOutputs = 1
        tx1.outputs = [txoutA]
        tx1.locktime = 0
        tx1hash = tx1.getHash()
        recipientList = tx1.makeRecipientsList()
        self.assertEqual(len(recipientList), 1)
        self.assertEqual(recipientList[0][0], 0)
        self.assertEqual(recipientList[0][1], 50 * ONE_BTC)
        self.assertEqual(tx1.getHashHex(), binary_to_hex(tx1hash))
        # Creating transaction to send coins from A to B
        tx2 = PyCreateAndSignTx_old( [[ addrA, tx1, 0 ]], [[addrB, 50*ONE_BTC]])
        psp = PyScriptProcessor()
        psp.setTxObjects(tx1, tx2, 0)
        self.assertTrue(psp.verifyTransactionValid())

    def testVerifyTxFromFakeBlockChain(self):
        # tx1Fake/tx2Fake come from a throwaway chain (module level); script
        # verification does not depend on the chain, so this must still pass.
        psp = PyScriptProcessor()
        psp.setTxObjects(tx1Fake, tx2Fake, 0)
        self.assertTrue(psp.verifyTransactionValid())

    def test2of2MultiSigTx(self):
        tx1 = PyTx().unserialize(hex_to_binary('010000000189a0022c8291b4328338ec95179612b8ebf72067051de019a6084fb97eae0ebe000000004a4930460221009627882154854e3de066943ba96faba02bb8b80c1670a0a30d0408caa49f03df022100b625414510a2a66ebb43fffa3f4023744695380847ee1073117ec90cb60f2c8301ffffffff0210c18d0000000000434104a701496f10db6aa8acbb6a7aa14d62f4925f8da03de7f0262010025945f6ebcc3efd55b6aa4bc6f811a0dc1bbdd2644bdd81c8a63766aa11f650cd7736bbcaf8ac001bb7000000000043526b006b7dac7ca914fc1243972b59c1726735d3c5cca40e415039dce9879a6c936b7dac7ca914375dd72e03e7b5dbb49f7e843b7bef4a2cc2ce9e879a6c936b6c6ca200000000'))
        tx2 = PyTx().unserialize(hex_to_binary('01000000011c9608650a912be7fa88eecec664e6fbfa4b676708697fa99c28b3370005f32d01000000fd1701483045022017462c29efc9158cf26f2070d444bb2b087b8a0e6287a9274fa36fad30c46485022100c6d4cc6cd504f768389637df71c1ccd452e0691348d0f418130c31da8cc2a6e8014104e83c1d4079a1b36417f0544063eadbc44833a992b9667ab29b4ff252d8287687bad7581581ae385854d4e5f1fcedce7de12b1aec1cb004cabb2ec1f3de9b2e60493046022100fdc7beb27de0c3a53fbf96df7ccf9518c5fe7873eeed413ce17e4c0e8bf9c06e022100cc15103b3c2e1f49d066897fe681a12e397e87ed7ee39f1c8c4a5fef30f4c2c60141047cf315904fcc2e3e2465153d39019e0d66a8aaec1cec1178feb10d46537427239fd64b81e41651e89b89fefe6a23561d25dddc835395dd3542f83b32a1906aebffffffff01c0d8a700000000001976a914fc1243972b59c1726735d3c5cca40e415039dce988ac00000000'))
        # Verify 2-of-2 tx from Testnet
        psp = PyScriptProcessor()
        psp.setTxObjects(tx1, tx2, 0)
        self.assertTrue(psp.verifyTransactionValid())

    def test2of3MultiSigTx(self):
        tx1 = PyTx().unserialize(hex_to_binary('010000000371c06e0639dbe6bc35e6f948da4874ae69d9d91934ec7c5366292d0cbd5f97b0010000008a47304402200117cdd3ec6259af29acea44db354a6f57ac10d8496782033f5fe0febfd77f1b02202ceb02d60dbb43e6d4e03e5b5fbadc031f8bbb3c6c34ad307939947987f600bf01410452d63c092209529ca2c75e056e947bc95f9daffb371e601b46d24377aaa3d004ab3c6be2d6d262b34d736b95f3b0ef6876826c93c4077d619c02ebd974c7facdffffffffa65aa866aa7743ec05ba61418015fc32ecabd99886732056f1d4454c8f762bf8000000008c493046022100ea0a9b41c9372837e52898205c7bebf86b28936a3ee725672d0ca8f434f876f0022100beb7243a51fbc0997e55cb519d3b9cbd59f7aba68d80ba1e8adbb53443cda3c00141043efd1ca3cffc50638031281d227ff347a3a27bc145e2f846891d29f87bc068c27710559c4d9cd71f7e9e763d6e2753172406eb1ed1fadcaf9a8972b4270f05b4ffffffffd866d14151ee1b733a2a7273f155ecb25c18303c31b2c4de5aa6080aef2e0006000000008b483045022052210f95f6b413c74ce12cfc1b14a36cb267f9fa3919fa6e20dade1cd570439f022100b9e5b325f312904804f043d06c6ebc8e4b1c6cd272856c48ab1736b9d562e10c01410423fdddfe7e4d70d762dd6596771e035f4b43d54d28c2231be1102056f81f067914fe4fb6fd6e3381228ee5587ddd2028c846025741e963d9b1d6cf2c2dea0dbcffffffff0210ef3200000000004341048a33e9fd2de28137574cc69fe5620199abe37b7d08a51c528876fe6c5fa7fc28535f5a667244445e79fffc9df85ec3d79d77693b1f37af0e2d7c1fa2e7113a48acc0d454070000000061526b006b7dac7ca9143cd1def404e12a85ead2b4d3f5f9f817fb0d46ef879a6c936b7dac7ca9146a4e7d5f798e90e84db9244d4805459f87275943879a6c936b7dac7ca914486efdd300987a054510b4ce1148d4ad290d911e879a6c936b6c6ca200000000'))
        tx2 = PyTx().unserialize(hex_to_binary('01000000012f654d4d1d7246d1a824c5b6c5177c0b5a1983864579aabb88cabd5d05e032e201000000fda0014730440220151ad44e7f78f9e0c4a3f2135c19ca3de8dbbb7c58893db096c0c5f1573d5dec02200724a78c3fa5f153103cb46816df46eb6cfac3718038607ddec344310066161e01410459fd82189b81772258a3fc723fdda900eb8193057d4a573ee5ad39e26b58b5c12c4a51b0edd01769f96ed1998221daf0df89634a7137a8fa312d5ccc95ed8925483045022100ca34834ece5925cff6c3d63e2bda6b0ce0685b18f481c32e70de9a971e85f12f0220572d0b5de0cf7b8d4e28f4914a955e301faaaa42f05feaa1cc63b45f938d75d9014104ce6242d72ee67e867e6f8ec434b95fcb1889c5b485ec3414df407e11194a7ce012eda021b68f1dd124598a9b677d6e7d7c95b1b7347f5c5a08efa628ef0204e1483045022074e01e8225e8c4f9d0b3f86908d42a61e611f406e13817d16240f94f52f49359022100f4c768dd89c6435afd3834ae2c882465ade92d7e1cc5c2c2c3d8d25c41b3ea61014104ce66c9f5068b715b62cc1622572cd98a08812d8ca01563045263c3e7af6b997e603e8e62041c4eb82dfd386a3412c34c334c34eb3c76fb0e37483fc72323f807ffffffff01b0ad5407000000001976a9146a4e7d5f798e90e84db9244d4805459f8727594388ac00000000'))
        # Verify 2-of-3 tx from Testnet
        psp = PyScriptProcessor()
        psp.setTxObjects(tx1, tx2, 0)
        self.assertTrue(psp.verifyTransactionValid())

    def testMultiSig(self):
        tx1 = PyTx().unserialize(hex_to_binary('0100000001845ad165bdc0f9b5829cf5a594c4148dfd89e24756303f3a8dabeb597afa589b010000008b483045022063c233df8efa3d1885e069e375a8eabf16b23475ef21bdc9628a513ee4caceb702210090a102c7b602043e72b34a154d495ac19b3b9e42acb962c399451f2baead8f4c014104b38f79037ad25b84a564eaf53ede93dec70b35216e6682aa71a47cefa2996ec49acfbb0a8730577c62ef9a7cc20c740aaaaee75419bef9640a4216c2b49c42d3ffffffff02000c022900000000434104c08c0a71ccbe838403e3870aa1ab871b0ab3a6014b0ba41f6df2b9aefea73134ecaa0b27797620e402a33799e9047f86519d9e43bbd504cf753c293752933f4fac406f40010000000062537a7652a269537a829178a91480677c5392220db736455533477d0bc2fba65502879b69537a829178a91402d7aa2e76d9066fb2b3c41ff8839a5c81bdca19879b69537a829178a91410039ce4fdb5d4ee56148fe3935b9bfbbe4ecc89879b6953ae00000000'))
        tx2 = PyTx().unserialize(hex_to_binary('0100000001bb664ff716b9dfc831bcc666c1767f362ad467fcfbaf4961de92e45547daab8701000000fd190100493046022100d73f633f114e0e0b324d87d38d34f22966a03b072803afa99c9408201f6d6dc6022100900e85be52ad2278d24e7edbb7269367f5f2d6f1bd338d017ca460008776614401473044022071fef8ac0aa6318817dbd242bf51fb5b75be312aa31ecb44a0afe7b49fcf840302204c223179a383bb6fcb80312ac66e473345065f7d9136f9662d867acf96c12a42015241048c006ff0d2cfde86455086af5a25b88c2b81858aab67f6a3132c885a2cb9ec38e700576fd46c7d72d7d22555eee3a14e2876c643cd70b1b0a77fbf46e62331ac4104b68ef7d8f24d45e1771101e269c0aacf8d3ed7ebe12b65521712bba768ef53e1e84fff3afbee360acea0d1f461c013557f71d426ac17a293c5eebf06e468253e00ffffffff0280969800000000001976a9140817482d2e97e4be877efe59f4bae108564549f188ac7015a7000000000062537a7652a269537a829178a91480677c5392220db736455533477d0bc2fba65502879b69537a829178a91402d7aa2e76d9066fb2b3c41ff8839a5c81bdca19879b69537a829178a91410039ce4fdb5d4ee56148fe3935b9bfbbe4ecc89879b6953ae00000000'))
        # OP_CHECKMULTISIG from Testnet
        psp = PyScriptProcessor()
        psp.setTxObjects(tx1, tx2, 0)
        self.assertTrue(psp.verifyTransactionValid())

    # The triple-quoted block below is a disabled test kept for reference.
    '''
    def testMultiSigAddrExtraction(self):
        script1 = hex_to_binary('4104b54b5fc1917945fff64785d4baaca66a9704e9ed26002f51f53763499643321fbc047683a62be16e114e25404ce6ffdcf625a928002403402bf9f01e5cbd5f3dad4104f576e534f9bbf6d7c5f186ff4c6e0c5442c2755314bdee62fbc656f94d6cbf32c5eb3522da21cf9f954133000ffccb20dbfec030737640cc3315ce09619210d0ac')
        expectedBtcAddrList1 = ['1KmV9FdKJEFFCHydZUZGdBL9uKq2T9JUm8','13maaQeK5qSPjHwnHhwNUtNKruK3qYLwvv']
        self.verifyMultiSigAddrExtraction(script1, expectedBtcAddrList1)
        script2 = hex_to_binary('537a7652a269537a829178a91480677c5392220db736455533477d0bc2fba65502879b69537a829178a91402d7aa2e76d9066fb2b3c41ff8839a5c81bdca19879b69537a829178a91410039ce4fdb5d4ee56148fe3935b9bfbbe4ecc89879b6953ae')
        expectedBtcAddrList2 = ['1ChwTs5Dmh6y9iDh4pjWyu2X6nAhjre7SV','1G2i31fxRqaoXBfYMuE4YKb9x96uYcHeQ','12Tg96ZPSYc3P2g5c9c4znFFH2whriN9NQ']
        self.verifyMultiSigAddrExtraction(script2, expectedBtcAddrList2)
        script3 = hex_to_binary('527a7651a269527a829178a914731cdb75c88a01cbb96729888f726b3b9f29277a879b69527a829178a914e9b4261c6122f8957683636548923acc069e8141879b6952ae')
        expectedBtcAddrList3 = ['1BVfH6iKT1s8fYEVSj39QkJrPqCKN4hv2m','1NJiFfFPZ177Pv96Yt4FCNZFEumyL2eKmt']
        self.verifyMultiSigAddrExtraction(script3, expectedBtcAddrList3)
    '''

    def verifyMultiSigAddrExtraction(self, scr, expectedBtcAddrList):
        # Helper: extract hash160s from a multisig script and compare the
        # resulting base58 addresses with the expected list.
        addrList = getMultisigScriptInfo(scr)[2]
        btcAddrList = []
        for a in addrList:
            btcAddrList.append(PyBtcAddress().createFromPublicKeyHash160(a).getAddrStr())
        self.assertEqual(btcAddrList, expectedBtcAddrList)

    def testUnpackUnserializePyOutPoint(self):
        outpoint = PyOutPoint().unserialize(BinaryUnpacker(ALL_ZERO_OUTPOINT))
        self.assertEqual(outpoint.txHash, hex_to_binary('00'*32))
        self.assertEqual(outpoint.txOutIndex, 0)

    def testCopyPyOutPoint(self):
        outpoint = PyOutPoint().unserialize(BinaryUnpacker(ALL_ZERO_OUTPOINT))
        outpointCopy = outpoint.copy()
        self.assertEqual(outpoint.txHash, outpointCopy.txHash)
        self.assertEqual(outpoint.txOutIndex, outpointCopy.txOutIndex)

    def testPPrintPyOutPoint(self):
        # No return value - Should just print 0s
        outpoint = PyOutPoint().unserialize(BinaryUnpacker(ALL_ZERO_OUTPOINT))
        print "PyOutPoint PPrint Test. Expect all 0s: "
        outpoint.pprint()

    '''
    Does not pass because fromCpp is missing
    def testCreateCppFromCppPyOutPoint(self):
        outpoint = PyOutPoint().unserialize(BinaryUnpacker(ALL_ZERO_OUTPOINT))
        outpointFromCpp = PyOutPoint().fromCpp(outpoint.createCpp())
        self.assertEqual(outpoint.txHash, outpointFromCpp.txHash)
        self.assertEqual(outpoint.txOutIndex, outpointFromCpp.txOutIndex)
    '''

    def testBogusBlockComponent(self):
        # A BlockComponent subclass that overrides nothing must raise
        # NotImplementedError from both serialize() and unserialize().
        class TestBlockComponent(BlockComponent):
            pass
        testBlkComp = TestBlockComponent()
        self.assertRaises(NotImplementedError, testBlkComp.serialize)
        self.assertRaises(NotImplementedError, testBlkComp.unserialize)
# TODO: Add some tests for the OP_CHECKMULTISIG support in TxDP
# Running tests with "python <module name>" will NOT work for any Armory tests
# You must run tests with "python -m unittest <module name>" or run all tests with "python -m unittest discover"
# if __name__ == "__main__":
# unittest.main()
| 77.611296 | 1,513 | 0.85326 | 1,127 | 23,361 | 17.61402 | 0.374445 | 0.022669 | 0.011637 | 0.011083 | 0.19324 | 0.167246 | 0.154148 | 0.146239 | 0.13687 | 0.13687 | 0 | 0.432799 | 0.097385 | 23,361 | 300 | 1,514 | 77.87 | 0.508631 | 0.037926 | 0 | 0.205479 | 0 | 0 | 0.625924 | 0.573003 | 0 | 1 | 0 | 0.003333 | 0.182648 | 0 | null | null | 0.004566 | 0.041096 | null | null | 0.027397 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c24b0095d48dd819984cda19052dc890a2ec56e3 | 132,716 | py | Python | old/baxter_utils_3.py | YoshimitsuMatsutaIe/hoge_flow_test | 22e2e2ce043a3107bd06449f6f9958641293e414 | [
"MIT"
] | null | null | null | old/baxter_utils_3.py | YoshimitsuMatsutaIe/hoge_flow_test | 22e2e2ce043a3107bd06449f6f9958641293e414 | [
"MIT"
] | null | null | null | old/baxter_utils_3.py | YoshimitsuMatsutaIe/hoge_flow_test | 22e2e2ce043a3107bd06449f6f9958641293e414 | [
"MIT"
] | null | null | null | """baxterロボットの色々関数とクラス
・順運動学写像,ヤコビ行列など
・無印よりちょっと速い
"""
import numpy as np
from math import cos, sin, tan, pi, sqrt
class BaxterKinematics3:
"""ローカル座標系の原点,ヤコビ行列等を計算
・ちょっとはやい
"""
L = 278e-3
h = 64e-3
H = 1104e-3
L0 = 270.35e-3
L1 = 69e-3
L2 = 364.35e-3
L3 = 69e-3
L4 = 374.29e-3
L5 = 10e-3
L6 = 368.3e-3
def __init__(self, **kwargs):
"""こっちが速い(なんで)"""
return
    def origins(self, q):
        """Return the origin of every local frame, expressed in world frame.

        Parameters: q -- (7, 1) array of joint angles q1..q7.
        Returns: list of eleven (3, 1) arrays
            [o_Wo, o_BL, o_0, o_1, o_2, o_3, o_4, o_5, o_6, o_7, o_GL].

        Notes (from the expressions below): q7 never appears, i.e. the last
        wrist joint does not move any of these points, and o_6 == o_7 (same
        closed-form expression).  The constant 0.707106781186548 is
        sqrt(2)/2, i.e. sin/cos of a fixed 45-degree rotation (presumably
        the shoulder mount -- TODO confirm).  The closed forms look
        machine-generated (CAS output) and are kept verbatim.
        """
        q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
        # world origin
        o_Wo = np.array([
            [0],
            [0],
            [0]
        ])
        # base-link origin (fixed offset from world)
        o_BL = np.array([
            [self.L],
            [-self.h],
            [self.H]
        ])
        # frames 0 and 1 share an origin (offset L0 above the base link)
        o_0 = np.array([
            [self.L],
            [-self.h],
            [self.H + self.L0]
        ])
        o_1 = np.array([
            [self.L],
            [-self.h],
            [self.H + self.L0]
        ])
        # first joint-dependent origin (depends on q1 only)
        o_2 = np.array([
            [self.L + 0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1)],
            [0.707106781186548*self.L1*sin(q1) - 0.707106781186548*self.L1*cos(q1) - self.h],
            [self.H + self.L0]
        ])
        o_3 = np.array([
            [self.L + 0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2)],
            [0.707106781186548*self.L1*sin(q1) - 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) - 0.707106781186548*self.L2*cos(q1)*cos(q2) - self.h],
            [self.H + self.L0 - self.L2*sin(q2)]
        ])
        o_4 = np.array([
            [self.L + 0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))],
            [0.707106781186548*self.L1*sin(q1) - 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) - 0.707106781186548*self.L2*cos(q1)*cos(q2) - 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1)) - self.h],
            [self.H + self.L0 - self.L2*sin(q2) - self.L3*cos(q2)*cos(q3)]
        ])
        o_5 = np.array([
            [self.L + 0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1)) - 0.707106781186548*self.L4*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L4*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4))],
            [0.707106781186548*self.L1*sin(q1) - 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) - 0.707106781186548*self.L2*cos(q1)*cos(q2) - 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1)) + 0.707106781186548*self.L4*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L4*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4)) - self.h],
            [self.H + self.L0 - self.L2*sin(q2) - self.L3*cos(q2)*cos(q3) - self.L4*(sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3))]
        ])
        o_6 = np.array([
            [self.L + 0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1)) - 0.707106781186548*self.L4*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L4*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*cos(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*sin(q5))],
            [0.707106781186548*self.L1*sin(q1) - 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) - 0.707106781186548*self.L2*cos(q1)*cos(q2) - 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1)) + 0.707106781186548*self.L4*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L4*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L5*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*cos(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*sin(q5)) - self.h],
            [self.H + self.L0 - self.L2*sin(q2) - self.L3*cos(q2)*cos(q3) - self.L4*(sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3)) + self.L5*((sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q3)*sin(q5)*cos(q2))]
        ])
        # identical to o_6 (frames 6 and 7 share an origin)
        o_7 = np.array([
            [self.L + 0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1)) - 0.707106781186548*self.L4*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L4*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*cos(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*sin(q5))],
            [0.707106781186548*self.L1*sin(q1) - 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) - 0.707106781186548*self.L2*cos(q1)*cos(q2) - 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1)) + 0.707106781186548*self.L4*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L4*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L5*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*cos(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*sin(q5)) - self.h],
            [self.H + self.L0 - self.L2*sin(q2) - self.L3*cos(q2)*cos(q3) - self.L4*(sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3)) + self.L5*((sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q3)*sin(q5)*cos(q2))]
        ])
        # gripper / end-effector origin (adds the L6 term, depends on q6)
        o_GL = np.array([
            [self.L + 0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1)) - 0.707106781186548*self.L4*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L4*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*cos(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*sin(q5)) + self.L6*(0.707106781186548*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5))*sin(q6) + 0.707106781186548*(((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*cos(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*sin(q5))*sin(q6) + 0.707106781186548*((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) + cos(q1)*cos(q2)*cos(q4))*cos(q6) + 0.707106781186548*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) + sin(q1)*cos(q2)*cos(q4))*cos(q6))],
            [0.707106781186548*self.L1*sin(q1) - 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) - 0.707106781186548*self.L2*cos(q1)*cos(q2) - 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1)) + 0.707106781186548*self.L4*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L4*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L5*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*cos(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*sin(q5)) + self.L6*(-0.707106781186548*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5))*sin(q6) + 0.707106781186548*(((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*cos(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*sin(q5))*sin(q6) - 0.707106781186548*((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) + cos(q1)*cos(q2)*cos(q4))*cos(q6) + 0.707106781186548*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) + sin(q1)*cos(q2)*cos(q4))*cos(q6)) - self.h],
            [self.H + self.L0 - self.L2*sin(q2) - self.L3*cos(q2)*cos(q3) - self.L4*(sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3)) + self.L5*((sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q3)*sin(q5)*cos(q2)) + self.L6*(((sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q3)*sin(q5)*cos(q2))*sin(q6) + (-sin(q2)*cos(q4) - sin(q4)*cos(q2)*cos(q3))*cos(q6))]
        ])
        return [o_Wo, o_BL, o_0, o_1, o_2, o_3, o_4, o_5, o_6, o_7, o_GL]
# Jacobian matrices (3x7 position Jacobian of each frame origin w.r.t. q)
def jacobi_Wo(self, q):
q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
z = np.zeros((3, 7))
return z
def jacobi_BL(self, q):
q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
z = np.zeros((3, 7))
return z
def jacobi_0(self, q):
q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
z = np.zeros((3, 7))
return z
def jacobi_1(self, q):
q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
z = np.zeros((3, 7))
return z
def jacobi_2(self, q):
q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
z = np.array([
[-0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1), 0, 0, 0, 0, 0, 0],
[0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1), 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]
])
return z
def jacobi_3(self, q):
q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
z = np.array([
[-0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) - 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2), -0.707106781186548*self.L2*sin(q1)*sin(q2) - 0.707106781186548*self.L2*sin(q2)*cos(q1), 0, 0, 0, 0, 0],
[0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2), -0.707106781186548*self.L2*sin(q1)*sin(q2) + 0.707106781186548*self.L2*sin(q2)*cos(q1), 0, 0, 0, 0, 0],
[0, -self.L2*cos(q2), 0, 0, 0, 0, 0]
])
return z
def jacobi_4(self, q):
q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
z = np.array([
[-0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) - 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1)), -0.707106781186548*self.L2*sin(q1)*sin(q2) - 0.707106781186548*self.L2*sin(q2)*cos(q1) - 0.707106781186548*self.L3*sin(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L3*cos(q1)*cos(q2)*cos(q3), 0.707106781186548*self.L3*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3)), 0, 0, 0, 0],
[0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) - 0.707106781186548*self.L3*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1)), -0.707106781186548*self.L2*sin(q1)*sin(q2) + 0.707106781186548*self.L2*sin(q2)*cos(q1) - 0.707106781186548*self.L3*sin(q1)*cos(q2)*cos(q3) + 0.707106781186548*self.L3*cos(q1)*cos(q2)*cos(q3), -0.707106781186548*self.L3*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3)), 0, 0, 0, 0],
[0, -self.L2*cos(q2) + self.L3*sin(q2)*cos(q3), self.L3*sin(q3)*cos(q2), 0, 0, 0, 0]
])
return z
def jacobi_5(self, q):
q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
z = np.array([
[-0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) - 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1)) - 0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L4*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) + sin(q1)*cos(q2)*cos(q4)), -0.707106781186548*self.L2*sin(q1)*sin(q2) - 0.707106781186548*self.L2*sin(q2)*cos(q1) - 0.707106781186548*self.L3*sin(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L3*cos(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L4*(sin(q1)*sin(q2)*cos(q4) + sin(q1)*sin(q4)*cos(q2)*cos(q3)) - 0.707106781186548*self.L4*(sin(q2)*cos(q1)*cos(q4) + sin(q4)*cos(q1)*cos(q2)*cos(q3)), 0.707106781186548*self.L3*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3)) - 0.707106781186548*self.L4*(sin(q1)*cos(q3) - sin(q2)*sin(q3)*cos(q1))*sin(q4) - 0.707106781186548*self.L4*(-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q4), -0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*cos(q4) + sin(q4)*cos(q1)*cos(q2)) - 0.707106781186548*self.L4*((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2)), 0, 0, 0],
[0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) - 0.707106781186548*self.L3*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1)) - 0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) + 0.707106781186548*self.L4*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) + sin(q1)*cos(q2)*cos(q4)), -0.707106781186548*self.L2*sin(q1)*sin(q2) + 0.707106781186548*self.L2*sin(q2)*cos(q1) - 0.707106781186548*self.L3*sin(q1)*cos(q2)*cos(q3) + 0.707106781186548*self.L3*cos(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L4*(sin(q1)*sin(q2)*cos(q4) + sin(q1)*sin(q4)*cos(q2)*cos(q3)) + 0.707106781186548*self.L4*(sin(q2)*cos(q1)*cos(q4) + sin(q4)*cos(q1)*cos(q2)*cos(q3)), -0.707106781186548*self.L3*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3)) + 0.707106781186548*self.L4*(sin(q1)*cos(q3) - sin(q2)*sin(q3)*cos(q1))*sin(q4) - 0.707106781186548*self.L4*(-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q4), 0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*cos(q4) + sin(q4)*cos(q1)*cos(q2)) - 0.707106781186548*self.L4*((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2)), 0, 0, 0],
[0, -self.L2*cos(q2) + self.L3*sin(q2)*cos(q3) - self.L4*(-sin(q2)*sin(q4)*cos(q3) + cos(q2)*cos(q4)), self.L3*sin(q3)*cos(q2) + self.L4*sin(q3)*sin(q4)*cos(q2), -self.L4*(-sin(q2)*sin(q4) + cos(q2)*cos(q3)*cos(q4)), 0, 0, 0]
])
return z
def jacobi_6(self, q):
    """Jacobian matrix (3x7) associated with frame/link 6.

    Auto-generated closed-form expressions (symbolic-math code output);
    the constant 0.707106781186548 is numerically sqrt(2)/2 -- presumably
    from a 45-degree mounting rotation in the kinematic chain; confirm
    against the robot model. Do not edit the expressions by hand.

    Args:
        q: (7, 1) numpy array of joint angles [q1..q7].

    Returns:
        (3, 7) numpy array; the last two columns are identically zero.
    """
    # All seven angles are unpacked for uniformity with the other
    # generated jacobi_* methods; q6 and q7 are unused below.
    q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
    z = np.array([
    [-0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) - 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1)) - 0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L4*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) + sin(q1)*cos(q2)*cos(q4)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5)) + 0.707106781186548*self.L5*(((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2))*cos(q5) + (-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q5)), -0.707106781186548*self.L2*sin(q1)*sin(q2) - 0.707106781186548*self.L2*sin(q2)*cos(q1) - 0.707106781186548*self.L3*sin(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L3*cos(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L4*(sin(q1)*sin(q2)*cos(q4) + sin(q1)*sin(q4)*cos(q2)*cos(q3)) - 0.707106781186548*self.L4*(sin(q2)*cos(q1)*cos(q4) + sin(q4)*cos(q1)*cos(q2)*cos(q3)) + 0.707106781186548*self.L5*((sin(q1)*sin(q2)*sin(q4) - sin(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q1)*sin(q3)*sin(q5)*cos(q2)) + 0.707106781186548*self.L5*((sin(q2)*sin(q4)*cos(q1) - cos(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q3)*sin(q5)*cos(q1)*cos(q2)), 0.707106781186548*self.L3*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3)) - 0.707106781186548*self.L4*(sin(q1)*cos(q3) - sin(q2)*sin(q3)*cos(q1))*sin(q4) - 0.707106781186548*self.L4*(-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q4) + 0.707106781186548*self.L5*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q4)*cos(q5)) + 
    0.707106781186548*self.L5*((sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q4)*cos(q5) + (sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*sin(q5)), -0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*cos(q4) + sin(q4)*cos(q1)*cos(q2)) - 0.707106781186548*self.L4*((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2)) + 0.707106781186548*self.L5*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4))*cos(q5) + 0.707106781186548*self.L5*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4))*cos(q5), 0.707106781186548*self.L5*(-((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*sin(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q5)) + 0.707106781186548*self.L5*(-((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*sin(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q5)), 0, 0],
    [0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) - 0.707106781186548*self.L3*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1)) - 0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) + 0.707106781186548*self.L4*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) + sin(q1)*cos(q2)*cos(q4)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5)) - 0.707106781186548*self.L5*(((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2))*cos(q5) + (-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q5)), -0.707106781186548*self.L2*sin(q1)*sin(q2) + 0.707106781186548*self.L2*sin(q2)*cos(q1) - 0.707106781186548*self.L3*sin(q1)*cos(q2)*cos(q3) + 0.707106781186548*self.L3*cos(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L4*(sin(q1)*sin(q2)*cos(q4) + sin(q1)*sin(q4)*cos(q2)*cos(q3)) + 0.707106781186548*self.L4*(sin(q2)*cos(q1)*cos(q4) + sin(q4)*cos(q1)*cos(q2)*cos(q3)) + 0.707106781186548*self.L5*((sin(q1)*sin(q2)*sin(q4) - sin(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q1)*sin(q3)*sin(q5)*cos(q2)) - 0.707106781186548*self.L5*((sin(q2)*sin(q4)*cos(q1) - cos(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q3)*sin(q5)*cos(q1)*cos(q2)), -0.707106781186548*self.L3*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3)) + 0.707106781186548*self.L4*(sin(q1)*cos(q3) - sin(q2)*sin(q3)*cos(q1))*sin(q4) - 0.707106781186548*self.L4*(-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q4) - 0.707106781186548*self.L5*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q4)*cos(q5)) + 
    0.707106781186548*self.L5*((sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q4)*cos(q5) + (sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*sin(q5)), 0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*cos(q4) + sin(q4)*cos(q1)*cos(q2)) - 0.707106781186548*self.L4*((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2)) - 0.707106781186548*self.L5*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4))*cos(q5) + 0.707106781186548*self.L5*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4))*cos(q5), -0.707106781186548*self.L5*(-((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*sin(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q5)) + 0.707106781186548*self.L5*(-((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*sin(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q5)), 0, 0],
    [0, -self.L2*cos(q2) + self.L3*sin(q2)*cos(q3) - self.L4*(-sin(q2)*sin(q4)*cos(q3) + cos(q2)*cos(q4)) + self.L5*((sin(q2)*cos(q3)*cos(q4) + sin(q4)*cos(q2))*cos(q5) - sin(q2)*sin(q3)*sin(q5)), self.L3*sin(q3)*cos(q2) + self.L4*sin(q3)*sin(q4)*cos(q2) + self.L5*(sin(q3)*cos(q2)*cos(q4)*cos(q5) + sin(q5)*cos(q2)*cos(q3)), -self.L4*(-sin(q2)*sin(q4) + cos(q2)*cos(q3)*cos(q4)) + self.L5*(sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3))*cos(q5), self.L5*(-(sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*sin(q5) + sin(q3)*cos(q2)*cos(q5)), 0, 0]
    ])
    return z
def jacobi_7(self, q):
    """Jacobian matrix (3x7) associated with frame/link 7.

    Auto-generated closed-form expressions; do not edit by hand.

    NOTE(review): the expression body appears identical to jacobi_6 --
    plausibly frames 6 and 7 share an origin -- confirm against the
    kinematic model before attempting any deduplication.

    Args:
        q: (7, 1) numpy array of joint angles [q1..q7].

    Returns:
        (3, 7) numpy array; the last two columns are identically zero.
    """
    # q6 and q7 are unpacked for uniformity but unused below.
    q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
    z = np.array([
    [-0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) - 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1)) - 0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L4*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) + sin(q1)*cos(q2)*cos(q4)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5)) + 0.707106781186548*self.L5*(((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2))*cos(q5) + (-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q5)), -0.707106781186548*self.L2*sin(q1)*sin(q2) - 0.707106781186548*self.L2*sin(q2)*cos(q1) - 0.707106781186548*self.L3*sin(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L3*cos(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L4*(sin(q1)*sin(q2)*cos(q4) + sin(q1)*sin(q4)*cos(q2)*cos(q3)) - 0.707106781186548*self.L4*(sin(q2)*cos(q1)*cos(q4) + sin(q4)*cos(q1)*cos(q2)*cos(q3)) + 0.707106781186548*self.L5*((sin(q1)*sin(q2)*sin(q4) - sin(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q1)*sin(q3)*sin(q5)*cos(q2)) + 0.707106781186548*self.L5*((sin(q2)*sin(q4)*cos(q1) - cos(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q3)*sin(q5)*cos(q1)*cos(q2)), 0.707106781186548*self.L3*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3)) - 0.707106781186548*self.L4*(sin(q1)*cos(q3) - sin(q2)*sin(q3)*cos(q1))*sin(q4) - 0.707106781186548*self.L4*(-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q4) + 0.707106781186548*self.L5*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q4)*cos(q5)) + 
    0.707106781186548*self.L5*((sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q4)*cos(q5) + (sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*sin(q5)), -0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*cos(q4) + sin(q4)*cos(q1)*cos(q2)) - 0.707106781186548*self.L4*((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2)) + 0.707106781186548*self.L5*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4))*cos(q5) + 0.707106781186548*self.L5*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4))*cos(q5), 0.707106781186548*self.L5*(-((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*sin(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q5)) + 0.707106781186548*self.L5*(-((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*sin(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q5)), 0, 0],
    [0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) - 0.707106781186548*self.L3*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1)) - 0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) + 0.707106781186548*self.L4*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) + sin(q1)*cos(q2)*cos(q4)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5)) - 0.707106781186548*self.L5*(((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2))*cos(q5) + (-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q5)), -0.707106781186548*self.L2*sin(q1)*sin(q2) + 0.707106781186548*self.L2*sin(q2)*cos(q1) - 0.707106781186548*self.L3*sin(q1)*cos(q2)*cos(q3) + 0.707106781186548*self.L3*cos(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L4*(sin(q1)*sin(q2)*cos(q4) + sin(q1)*sin(q4)*cos(q2)*cos(q3)) + 0.707106781186548*self.L4*(sin(q2)*cos(q1)*cos(q4) + sin(q4)*cos(q1)*cos(q2)*cos(q3)) + 0.707106781186548*self.L5*((sin(q1)*sin(q2)*sin(q4) - sin(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q1)*sin(q3)*sin(q5)*cos(q2)) - 0.707106781186548*self.L5*((sin(q2)*sin(q4)*cos(q1) - cos(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q3)*sin(q5)*cos(q1)*cos(q2)), -0.707106781186548*self.L3*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3)) + 0.707106781186548*self.L4*(sin(q1)*cos(q3) - sin(q2)*sin(q3)*cos(q1))*sin(q4) - 0.707106781186548*self.L4*(-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q4) - 0.707106781186548*self.L5*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q4)*cos(q5)) + 
    0.707106781186548*self.L5*((sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q4)*cos(q5) + (sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*sin(q5)), 0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*cos(q4) + sin(q4)*cos(q1)*cos(q2)) - 0.707106781186548*self.L4*((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2)) - 0.707106781186548*self.L5*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4))*cos(q5) + 0.707106781186548*self.L5*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4))*cos(q5), -0.707106781186548*self.L5*(-((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*sin(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q5)) + 0.707106781186548*self.L5*(-((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*sin(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q5)), 0, 0],
    [0, -self.L2*cos(q2) + self.L3*sin(q2)*cos(q3) - self.L4*(-sin(q2)*sin(q4)*cos(q3) + cos(q2)*cos(q4)) + self.L5*((sin(q2)*cos(q3)*cos(q4) + sin(q4)*cos(q2))*cos(q5) - sin(q2)*sin(q3)*sin(q5)), self.L3*sin(q3)*cos(q2) + self.L4*sin(q3)*sin(q4)*cos(q2) + self.L5*(sin(q3)*cos(q2)*cos(q4)*cos(q5) + sin(q5)*cos(q2)*cos(q3)), -self.L4*(-sin(q2)*sin(q4) + cos(q2)*cos(q3)*cos(q4)) + self.L5*(sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3))*cos(q5), self.L5*(-(sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*sin(q5) + sin(q3)*cos(q2)*cos(q5)), 0, 0]
    ])
    return z
def jacobi_GL(self, q):
    """Jacobian matrix (3x7) associated with the GL frame.

    Auto-generated closed-form expressions; this is the only jacobi_*
    block in view that includes self.L6 and q6 terms, so GL presumably
    denotes the outermost (end-effector/grasp) frame -- confirm against
    the kinematic model. Do not edit the expressions by hand.

    Args:
        q: (7, 1) numpy array of joint angles [q1..q7].

    Returns:
        (3, 7) numpy array; the last column is identically zero.
    """
    # q7 is unpacked for uniformity but unused below.
    q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
    z = np.array([
    [-0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) - 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1)) - 0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) - 0.707106781186548*self.L4*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) + sin(q1)*cos(q2)*cos(q4)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5)) + 0.707106781186548*self.L5*(((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2))*cos(q5) + (-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q5)) + self.L6*((0.707106781186548*((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + 0.707106781186548*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5))*sin(q6) + (0.707106781186548*((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2))*cos(q5) + 0.707106781186548*(-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q5))*sin(q6) + (0.707106781186548*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) + 0.707106781186548*cos(q1)*cos(q2)*cos(q4))*cos(q6) + (0.707106781186548*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*sin(q4) - 0.707106781186548*sin(q1)*cos(q2)*cos(q4))*cos(q6)), -0.707106781186548*self.L2*sin(q1)*sin(q2) - 0.707106781186548*self.L2*sin(q2)*cos(q1) - 0.707106781186548*self.L3*sin(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L3*cos(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L4*(sin(q1)*sin(q2)*cos(q4) + sin(q1)*sin(q4)*cos(q2)*cos(q3)) - 0.707106781186548*self.L4*(sin(q2)*cos(q1)*cos(q4) + sin(q4)*cos(q1)*cos(q2)*cos(q3)) + 0.707106781186548*self.L5*((sin(q1)*sin(q2)*sin(q4) - sin(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + 
    sin(q1)*sin(q3)*sin(q5)*cos(q2)) + 0.707106781186548*self.L5*((sin(q2)*sin(q4)*cos(q1) - cos(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q3)*sin(q5)*cos(q1)*cos(q2)) + self.L6*((0.707106781186548*(sin(q1)*sin(q2)*sin(q4) - sin(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + 0.707106781186548*sin(q1)*sin(q3)*sin(q5)*cos(q2))*sin(q6) + (0.707106781186548*(sin(q2)*sin(q4)*cos(q1) - cos(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + 0.707106781186548*sin(q3)*sin(q5)*cos(q1)*cos(q2))*sin(q6) + (-0.707106781186548*sin(q1)*sin(q2)*cos(q4) - 0.707106781186548*sin(q1)*sin(q4)*cos(q2)*cos(q3))*cos(q6) + (-0.707106781186548*sin(q2)*cos(q1)*cos(q4) - 0.707106781186548*sin(q4)*cos(q1)*cos(q2)*cos(q3))*cos(q6)), 0.707106781186548*self.L3*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3)) - 0.707106781186548*self.L4*(sin(q1)*cos(q3) - sin(q2)*sin(q3)*cos(q1))*sin(q4) - 0.707106781186548*self.L4*(-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q4) + 0.707106781186548*self.L5*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q4)*cos(q5)) + 0.707106781186548*self.L5*((sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q4)*cos(q5) + (sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*sin(q5)) + self.L6*((0.707106781186548*(sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q5) + 0.707106781186548*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q4)*cos(q5))*sin(q6) + 0.707106781186548*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q4)*cos(q6) + (0.707106781186548*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q4)*cos(q5) + 0.707106781186548*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*sin(q5))*sin(q6) + 0.707106781186548*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*sin(q4)*cos(q6)), -0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*cos(q4) + sin(q4)*cos(q1)*cos(q2)) - 0.707106781186548*self.L4*((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2)) + 
    0.707106781186548*self.L5*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4))*cos(q5) + 0.707106781186548*self.L5*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4))*cos(q5) + self.L6*(0.707106781186548*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4))*sin(q6)*cos(q5) + (0.707106781186548*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - 0.707106781186548*sin(q4)*cos(q1)*cos(q2))*cos(q6) + 0.707106781186548*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4))*sin(q6)*cos(q5) + (0.707106781186548*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - 0.707106781186548*sin(q1)*sin(q4)*cos(q2))*cos(q6)), 0.707106781186548*self.L5*(-((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*sin(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q5)) + 0.707106781186548*self.L5*(-((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*sin(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q5)) + self.L6*((-0.707106781186548*((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*sin(q5) + 0.707106781186548*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q5))*sin(q6) + (-0.707106781186548*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*sin(q5) + 0.707106781186548*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q5))*sin(q6)), self.L6*((0.707106781186548*((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + 0.707106781186548*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5))*cos(q6) + (0.707106781186548*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*cos(q5) + 0.707106781186548*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*sin(q5))*cos(q6) - (0.707106781186548*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) + 0.707106781186548*cos(q1)*cos(q2)*cos(q4))*sin(q6) - 
    (0.707106781186548*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) + 0.707106781186548*sin(q1)*cos(q2)*cos(q4))*sin(q6)), 0],
    [0.707106781186548*self.L1*sin(q1) + 0.707106781186548*self.L1*cos(q1) + 0.707106781186548*self.L2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3)) - 0.707106781186548*self.L3*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1)) - 0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4)) + 0.707106781186548*self.L4*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) + sin(q1)*cos(q2)*cos(q4)) + 0.707106781186548*self.L5*(((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5)) - 0.707106781186548*self.L5*(((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2))*cos(q5) + (-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q5)) + self.L6*((0.707106781186548*((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) + 0.707106781186548*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5))*sin(q6) + (-0.707106781186548*((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2))*cos(q5) - 0.707106781186548*(-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q5))*sin(q6) + (0.707106781186548*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) + 0.707106781186548*cos(q1)*cos(q2)*cos(q4))*cos(q6) + (-0.707106781186548*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*sin(q4) + 0.707106781186548*sin(q1)*cos(q2)*cos(q4))*cos(q6)), -0.707106781186548*self.L2*sin(q1)*sin(q2) + 0.707106781186548*self.L2*sin(q2)*cos(q1) - 0.707106781186548*self.L3*sin(q1)*cos(q2)*cos(q3) + 0.707106781186548*self.L3*cos(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L4*(sin(q1)*sin(q2)*cos(q4) + sin(q1)*sin(q4)*cos(q2)*cos(q3)) + 0.707106781186548*self.L4*(sin(q2)*cos(q1)*cos(q4) + sin(q4)*cos(q1)*cos(q2)*cos(q3)) + 0.707106781186548*self.L5*((sin(q1)*sin(q2)*sin(q4) - sin(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + 
    sin(q1)*sin(q3)*sin(q5)*cos(q2)) - 0.707106781186548*self.L5*((sin(q2)*sin(q4)*cos(q1) - cos(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q3)*sin(q5)*cos(q1)*cos(q2)) + self.L6*((0.707106781186548*(sin(q1)*sin(q2)*sin(q4) - sin(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) + 0.707106781186548*sin(q1)*sin(q3)*sin(q5)*cos(q2))*sin(q6) + (-0.707106781186548*(sin(q2)*sin(q4)*cos(q1) - cos(q1)*cos(q2)*cos(q3)*cos(q4))*cos(q5) - 0.707106781186548*sin(q3)*sin(q5)*cos(q1)*cos(q2))*sin(q6) + (-0.707106781186548*sin(q1)*sin(q2)*cos(q4) - 0.707106781186548*sin(q1)*sin(q4)*cos(q2)*cos(q3))*cos(q6) + (0.707106781186548*sin(q2)*cos(q1)*cos(q4) + 0.707106781186548*sin(q4)*cos(q1)*cos(q2)*cos(q3))*cos(q6)), -0.707106781186548*self.L3*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1)) + 0.707106781186548*self.L3*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3)) + 0.707106781186548*self.L4*(sin(q1)*cos(q3) - sin(q2)*sin(q3)*cos(q1))*sin(q4) - 0.707106781186548*self.L4*(-sin(q1)*sin(q2)*sin(q3) - cos(q1)*cos(q3))*sin(q4) - 0.707106781186548*self.L5*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q4)*cos(q5)) + 0.707106781186548*self.L5*((sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q4)*cos(q5) + (sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*sin(q5)) + self.L6*((-0.707106781186548*(sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*sin(q5) - 0.707106781186548*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q4)*cos(q5))*sin(q6) - 0.707106781186548*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q4)*cos(q6) + (0.707106781186548*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q4)*cos(q5) + 0.707106781186548*(sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*sin(q5))*sin(q6) + 0.707106781186548*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*sin(q4)*cos(q6)), 0.707106781186548*self.L4*((sin(q1)*sin(q3) + sin(q2)*cos(q1)*cos(q3))*cos(q4) + sin(q4)*cos(q1)*cos(q2)) - 0.707106781186548*self.L4*((sin(q1)*sin(q2)*cos(q3) - sin(q3)*cos(q1))*cos(q4) + sin(q1)*sin(q4)*cos(q2)) - 
    0.707106781186548*self.L5*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4))*cos(q5) + 0.707106781186548*self.L5*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4))*cos(q5) + self.L6*(-0.707106781186548*(-(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - cos(q1)*cos(q2)*cos(q4))*sin(q6)*cos(q5) + (-0.707106781186548*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) + 0.707106781186548*sin(q4)*cos(q1)*cos(q2))*cos(q6) + 0.707106781186548*(-(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) - sin(q1)*cos(q2)*cos(q4))*sin(q6)*cos(q5) + (0.707106781186548*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - 0.707106781186548*sin(q1)*sin(q4)*cos(q2))*cos(q6)), -0.707106781186548*self.L5*(-((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*sin(q5) + (-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q5)) + 0.707106781186548*self.L5*(-((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*sin(q5) + (sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q5)) + self.L6*((0.707106781186548*((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*sin(q5) - 0.707106781186548*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*cos(q5))*sin(q6) + (-0.707106781186548*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*sin(q5) + 0.707106781186548*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*cos(q5))*sin(q6)), self.L6*((-0.707106781186548*((-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*cos(q4) - sin(q4)*cos(q1)*cos(q2))*cos(q5) - 0.707106781186548*(-sin(q1)*cos(q3) + sin(q2)*sin(q3)*cos(q1))*sin(q5))*cos(q6) + (0.707106781186548*((-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*cos(q4) - sin(q1)*sin(q4)*cos(q2))*cos(q5) + 0.707106781186548*(sin(q1)*sin(q2)*sin(q3) + cos(q1)*cos(q3))*sin(q5))*cos(q6) - (-0.707106781186548*(-sin(q1)*sin(q3) - sin(q2)*cos(q1)*cos(q3))*sin(q4) - 0.707106781186548*cos(q1)*cos(q2)*cos(q4))*sin(q6) 
    - (0.707106781186548*(-sin(q1)*sin(q2)*cos(q3) + sin(q3)*cos(q1))*sin(q4) + 0.707106781186548*sin(q1)*cos(q2)*cos(q4))*sin(q6)), 0],
    [0, -self.L2*cos(q2) + self.L3*sin(q2)*cos(q3) - self.L4*(-sin(q2)*sin(q4)*cos(q3) + cos(q2)*cos(q4)) + self.L5*((sin(q2)*cos(q3)*cos(q4) + sin(q4)*cos(q2))*cos(q5) - sin(q2)*sin(q3)*sin(q5)) + self.L6*(((sin(q2)*cos(q3)*cos(q4) + sin(q4)*cos(q2))*cos(q5) - sin(q2)*sin(q3)*sin(q5))*sin(q6) + (sin(q2)*sin(q4)*cos(q3) - cos(q2)*cos(q4))*cos(q6)), self.L3*sin(q3)*cos(q2) + self.L4*sin(q3)*sin(q4)*cos(q2) + self.L5*(sin(q3)*cos(q2)*cos(q4)*cos(q5) + sin(q5)*cos(q2)*cos(q3)) + self.L6*((sin(q3)*cos(q2)*cos(q4)*cos(q5) + sin(q5)*cos(q2)*cos(q3))*sin(q6) + sin(q3)*sin(q4)*cos(q2)*cos(q6)), -self.L4*(-sin(q2)*sin(q4) + cos(q2)*cos(q3)*cos(q4)) + self.L5*(sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3))*cos(q5) + self.L6*((sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*cos(q6) + (sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3))*sin(q6)*cos(q5)), self.L5*(-(sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*sin(q5) + sin(q3)*cos(q2)*cos(q5)) + self.L6*(-(sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*sin(q5) + sin(q3)*cos(q2)*cos(q5))*sin(q6), self.L6*(((sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q3)*sin(q5)*cos(q2))*cos(q6) - (-sin(q2)*cos(q4) - sin(q4)*cos(q2)*cos(q3))*sin(q6)), 0]
    ])
    return z
def jacobi_all(self, q):
    """Compute every Jacobian matrix at the configuration q.

    Args:
        q: (7, 1) numpy array of joint angles.

    Returns:
        list of Jacobians in the fixed order
        [Wo, BL, 0, 1, 2, 3, 4, 5, 6, 7, GL].
    """
    # Dispatch by suffix; order matches the list the callers expect.
    tags = ("Wo", "BL", "0", "1", "2", "3", "4", "5", "6", "7", "GL")
    return [getattr(self, "jacobi_" + tag)(q) for tag in tags]
# Time derivatives of the Jacobian matrices
def djacobi_Wo(self, q, dq):
    """Time derivative of the Wo-frame Jacobian.

    The generated result is a constant zero matrix (the corresponding
    Jacobian does not vary with the joint state, so dJ/dt vanishes).

    Args:
        q: (7, 1) numpy array of joint angles (unused).
        dq: (7, 1) numpy array of joint velocities (unused).

    Returns:
        (3, 7) numpy array of zeros.
    """
    # The code generator emitted 14 unpacked locals that were never
    # used before returning the constant; that dead code is removed.
    return np.zeros((3, 7))
def djacobi_BL(self, q, dq):
    """Time derivative of the BL-frame Jacobian.

    The generated result is a constant zero matrix (the corresponding
    Jacobian does not vary with the joint state, so dJ/dt vanishes).

    Args:
        q: (7, 1) numpy array of joint angles (unused).
        dq: (7, 1) numpy array of joint velocities (unused).

    Returns:
        (3, 7) numpy array of zeros.
    """
    # The code generator emitted 14 unpacked locals that were never
    # used before returning the constant; that dead code is removed.
    return np.zeros((3, 7))
def djacobi_0(self, q, dq):
    """Time derivative of the frame-0 Jacobian.

    The generated result is a constant zero matrix (the corresponding
    Jacobian does not vary with the joint state, so dJ/dt vanishes).

    Args:
        q: (7, 1) numpy array of joint angles (unused).
        dq: (7, 1) numpy array of joint velocities (unused).

    Returns:
        (3, 7) numpy array of zeros.
    """
    # The code generator emitted 14 unpacked locals that were never
    # used before returning the constant; that dead code is removed.
    return np.zeros((3, 7))
def djacobi_1(self, q, dq):
    """Time derivative of the frame-1 Jacobian.

    The generated result is a constant zero matrix (the corresponding
    Jacobian does not vary with the joint state, so dJ/dt vanishes).

    Args:
        q: (7, 1) numpy array of joint angles (unused).
        dq: (7, 1) numpy array of joint velocities (unused).

    Returns:
        (3, 7) numpy array of zeros.
    """
    # The code generator emitted 14 unpacked locals that were never
    # used before returning the constant; that dead code is removed.
    return np.zeros((3, 7))
def djacobi_2(self, q, dq):
    """Time derivative of the frame-2 Jacobian (3x7).

    In the generated expressions only the first column is nonzero:
    frame 2 depends on q1 alone, so its derivative involves only q1
    and dq1.

    Args:
        q: (7, 1) numpy array of joint angles.
        dq: (7, 1) numpy array of joint velocities.

    Returns:
        (3, 7) numpy array dJ/dt.
    """
    # Full unpacking kept to mirror the other generated djacobi_*
    # methods (and to fail loudly on a mis-shaped input); only q1 and
    # dq1 appear in the expressions below.
    q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
    dq1, dq2, dq3, dq4, dq5, dq6, dq7 = dq[0, 0], dq[1, 0], dq[2, 0], dq[3, 0], dq[4, 0], dq[5, 0], dq[6, 0]
    # Start from the zero matrix and fill in the two nonzero entries.
    dz = np.zeros((3, 7))
    dz[0, 0] = -0.707106781186548*self.L1*dq1*sin(q1) - 0.707106781186548*self.L1*dq1*cos(q1)
    dz[1, 0] = -0.707106781186548*self.L1*dq1*sin(q1) + 0.707106781186548*self.L1*dq1*cos(q1)
    return dz
def djacobi_3(self, q, dq):
    """Time derivative of the frame-3 Jacobian (3x7).

    In the generated expressions only the first two columns are
    nonzero; they involve q1, q2 and dq1, dq2 only.

    Args:
        q: (7, 1) numpy array of joint angles.
        dq: (7, 1) numpy array of joint velocities.

    Returns:
        (3, 7) numpy array dJ/dt.
    """
    # Full unpacking kept to mirror the other generated djacobi_*
    # methods (and to fail loudly on a mis-shaped input).
    q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
    dq1, dq2, dq3, dq4, dq5, dq6, dq7 = dq[0, 0], dq[1, 0], dq[2, 0], dq[3, 0], dq[4, 0], dq[5, 0], dq[6, 0]
    # Start from the zero matrix and fill in the five nonzero entries.
    dz = np.zeros((3, 7))
    dz[0, 0] = -0.707106781186548*self.L1*dq1*sin(q1) - 0.707106781186548*self.L1*dq1*cos(q1) - 0.707106781186548*self.L2*dq1*sin(q1)*cos(q2) - 0.707106781186548*self.L2*dq1*cos(q1)*cos(q2) + 0.707106781186548*self.L2*dq2*sin(q1)*sin(q2) - 0.707106781186548*self.L2*dq2*sin(q2)*cos(q1)
    dz[0, 1] = 0.707106781186548*self.L2*dq1*sin(q1)*sin(q2) - 0.707106781186548*self.L2*dq1*sin(q2)*cos(q1) - 0.707106781186548*self.L2*dq2*sin(q1)*cos(q2) - 0.707106781186548*self.L2*dq2*cos(q1)*cos(q2)
    dz[1, 0] = -0.707106781186548*self.L1*dq1*sin(q1) + 0.707106781186548*self.L1*dq1*cos(q1) - 0.707106781186548*self.L2*dq1*sin(q1)*cos(q2) + 0.707106781186548*self.L2*dq1*cos(q1)*cos(q2) - 0.707106781186548*self.L2*dq2*sin(q1)*sin(q2) - 0.707106781186548*self.L2*dq2*sin(q2)*cos(q1)
    dz[1, 1] = -0.707106781186548*self.L2*dq1*sin(q1)*sin(q2) - 0.707106781186548*self.L2*dq1*sin(q2)*cos(q1) - 0.707106781186548*self.L2*dq2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*dq2*cos(q1)*cos(q2)
    dz[2, 1] = self.L2*dq2*sin(q2)
    return dz
def djacobi_4(self, q, dq):
    """Time derivative of the (3, 7) translational Jacobian for link/frame 4.

    Parameters
    ----------
    q : (7, 1) array-like
        Joint positions; only q1..q3 appear in the expressions below.
    dq : (7, 1) array-like
        Joint velocities; only dq1..dq3 appear in the expressions below.

    Returns
    -------
    numpy.ndarray
        dJ/dt as a (3, 7) matrix. Columns 4-7 are identically zero,
        consistent with joints 4-7 not moving this point.

    NOTE(review): these expressions look machine-generated (symbolic
    differentiation, likely SymPy); do not hand-edit individual terms.
    The constant 0.707106781186548 is 1/sqrt(2) — presumably a fixed
    45-degree base offset; confirm against the robot's DH parameters.
    """
    q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
    dq1, dq2, dq3, dq4, dq5, dq6, dq7 = dq[0, 0], dq[1, 0], dq[2, 0], dq[3, 0], dq[4, 0], dq[5, 0], dq[6, 0]
    z = np.array([
        [  # row 0
            -0.707106781186548*self.L1*dq1*sin(q1) - 0.707106781186548*self.L1*dq1*cos(q1) - 0.707106781186548*self.L2*dq1*sin(q1)*cos(q2) - 0.707106781186548*self.L2*dq1*cos(q1)*cos(q2) + 0.707106781186548*self.L2*dq2*sin(q1)*sin(q2) - 0.707106781186548*self.L2*dq2*sin(q2)*cos(q1) + 0.707106781186548*self.L3*(dq1*sin(q1)*sin(q3) + dq1*sin(q2)*cos(q1)*cos(q3) + dq2*sin(q1)*cos(q2)*cos(q3) - dq3*sin(q1)*sin(q2)*sin(q3) - dq3*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(dq1*sin(q1)*sin(q2)*cos(q3) - dq1*sin(q3)*cos(q1) - dq2*cos(q1)*cos(q2)*cos(q3) - dq3*sin(q1)*cos(q3) + dq3*sin(q2)*sin(q3)*cos(q1)),
            0.707106781186548*self.L2*dq1*sin(q1)*sin(q2) - 0.707106781186548*self.L2*dq1*sin(q2)*cos(q1) - 0.707106781186548*self.L2*dq2*sin(q1)*cos(q2) - 0.707106781186548*self.L2*dq2*cos(q1)*cos(q2) + 0.707106781186548*self.L3*dq1*sin(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L3*dq1*cos(q1)*cos(q2)*cos(q3) + 0.707106781186548*self.L3*dq2*sin(q1)*sin(q2)*cos(q3) + 0.707106781186548*self.L3*dq2*sin(q2)*cos(q1)*cos(q3) + 0.707106781186548*self.L3*dq3*sin(q1)*sin(q3)*cos(q2) + 0.707106781186548*self.L3*dq3*sin(q3)*cos(q1)*cos(q2),
            0.707106781186548*self.L3*(-dq1*sin(q1)*cos(q3) + dq1*sin(q2)*sin(q3)*cos(q1) + dq2*sin(q1)*sin(q3)*cos(q2) + dq3*sin(q1)*sin(q2)*cos(q3) - dq3*sin(q3)*cos(q1)) + 0.707106781186548*self.L3*(-dq1*sin(q1)*sin(q2)*sin(q3) - dq1*cos(q1)*cos(q3) + dq2*sin(q3)*cos(q1)*cos(q2) + dq3*sin(q1)*sin(q3) + dq3*sin(q2)*cos(q1)*cos(q3)),
            0,
            0,
            0,
            0
        ],
        [  # row 1
            -0.707106781186548*self.L1*dq1*sin(q1) + 0.707106781186548*self.L1*dq1*cos(q1) - 0.707106781186548*self.L2*dq1*sin(q1)*cos(q2) + 0.707106781186548*self.L2*dq1*cos(q1)*cos(q2) - 0.707106781186548*self.L2*dq2*sin(q1)*sin(q2) - 0.707106781186548*self.L2*dq2*sin(q2)*cos(q1) - 0.707106781186548*self.L3*(dq1*sin(q1)*sin(q3) + dq1*sin(q2)*cos(q1)*cos(q3) + dq2*sin(q1)*cos(q2)*cos(q3) - dq3*sin(q1)*sin(q2)*sin(q3) - dq3*cos(q1)*cos(q3)) + 0.707106781186548*self.L3*(dq1*sin(q1)*sin(q2)*cos(q3) - dq1*sin(q3)*cos(q1) - dq2*cos(q1)*cos(q2)*cos(q3) - dq3*sin(q1)*cos(q3) + dq3*sin(q2)*sin(q3)*cos(q1)),
            -0.707106781186548*self.L2*dq1*sin(q1)*sin(q2) - 0.707106781186548*self.L2*dq1*sin(q2)*cos(q1) - 0.707106781186548*self.L2*dq2*sin(q1)*cos(q2) + 0.707106781186548*self.L2*dq2*cos(q1)*cos(q2) - 0.707106781186548*self.L3*dq1*sin(q1)*cos(q2)*cos(q3) - 0.707106781186548*self.L3*dq1*cos(q1)*cos(q2)*cos(q3) + 0.707106781186548*self.L3*dq2*sin(q1)*sin(q2)*cos(q3) - 0.707106781186548*self.L3*dq2*sin(q2)*cos(q1)*cos(q3) + 0.707106781186548*self.L3*dq3*sin(q1)*sin(q3)*cos(q2) - 0.707106781186548*self.L3*dq3*sin(q3)*cos(q1)*cos(q2),
            0.707106781186548*self.L3*(-dq1*sin(q1)*cos(q3) + dq1*sin(q2)*sin(q3)*cos(q1) + dq2*sin(q1)*sin(q3)*cos(q2) + dq3*sin(q1)*sin(q2)*cos(q3) - dq3*sin(q3)*cos(q1)) - 0.707106781186548*self.L3*(-dq1*sin(q1)*sin(q2)*sin(q3) - dq1*cos(q1)*cos(q3) + dq2*sin(q3)*cos(q1)*cos(q2) + dq3*sin(q1)*sin(q3) + dq3*sin(q2)*cos(q1)*cos(q3)),
            0,
            0,
            0,
            0
        ],
        [  # row 2
            0,
            self.L2*dq2*sin(q2) + self.L3*dq2*cos(q2)*cos(q3) - self.L3*dq3*sin(q2)*sin(q3),
            -self.L3*dq2*sin(q2)*sin(q3) + self.L3*dq3*cos(q2)*cos(q3),
            0,
            0,
            0,
            0
        ]
    ])
    return z
def djacobi_5(self, q, dq):
    """Time derivative of the (3, 7) translational Jacobian for link/frame 5.

    Parameters
    ----------
    q : (7, 1) array-like
        Joint positions; only q1..q4 appear in the expressions below.
    dq : (7, 1) array-like
        Joint velocities; only dq1..dq4 appear in the expressions below.

    Returns
    -------
    numpy.ndarray
        dJ/dt as a (3, 7) matrix. Columns 5-7 are identically zero,
        consistent with joints 5-7 not moving this point.

    NOTE(review): machine-generated symbolic output; do not hand-edit
    individual terms. The leading factor 0.707106781186548*sqrt(2) is
    numerically ~1.0 but is kept verbatim as emitted by the generator;
    the `q1 + pi/4` terms presumably encode a fixed 45-degree base
    offset — confirm against the robot's DH parameters.
    """
    q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
    dq1, dq2, dq3, dq4, dq5, dq6, dq7 = dq[0, 0], dq[1, 0], dq[2, 0], dq[3, 0], dq[4, 0], dq[5, 0], dq[6, 0]
    z = np.array([
        [  # row 0
            0.707106781186548*sqrt(2)*(-self.L1*dq1*sin(q1 + pi/4) - self.L2*dq1*sin(q1 + pi/4)*cos(q2) - self.L2*dq2*sin(q2)*cos(q1 + pi/4) + self.L3*dq1*sin(q1 + pi/4)*sin(q2)*cos(q3) - self.L3*dq1*sin(q3)*cos(q1 + pi/4) - self.L3*dq2*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L3*dq3*sin(q1 + pi/4)*cos(q3) + self.L3*dq3*sin(q2)*sin(q3)*cos(q1 + pi/4) + self.L4*dq1*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq1*sin(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq1*sin(q3)*sin(q4)*cos(q1 + pi/4) - self.L4*dq2*sin(q2)*cos(q1 + pi/4)*cos(q4) - self.L4*dq2*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L4*dq3*sin(q1 + pi/4)*sin(q4)*cos(q3) + self.L4*dq3*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4) - self.L4*dq4*sin(q1 + pi/4)*sin(q3)*cos(q4) - self.L4*dq4*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq4*sin(q4)*cos(q1 + pi/4)*cos(q2)),
            0.707106781186548*sqrt(2)*(-self.L2*dq1*sin(q2)*cos(q1 + pi/4) - self.L2*dq2*sin(q1 + pi/4)*cos(q2) - self.L3*dq1*cos(q1 + pi/4)*cos(q2)*cos(q3) + self.L3*dq2*sin(q1 + pi/4)*sin(q2)*cos(q3) + self.L3*dq3*sin(q1 + pi/4)*sin(q3)*cos(q2) - self.L4*dq1*sin(q2)*cos(q1 + pi/4)*cos(q4) - self.L4*dq1*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3) + self.L4*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq2*sin(q1 + pi/4)*cos(q2)*cos(q4) + self.L4*dq3*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q2) + self.L4*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4) - self.L4*dq4*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)),
            0.707106781186548*sqrt(2)*(-self.L3*dq1*sin(q1 + pi/4)*cos(q3) + self.L3*dq1*sin(q2)*sin(q3)*cos(q1 + pi/4) + self.L3*dq2*sin(q1 + pi/4)*sin(q3)*cos(q2) + self.L3*dq3*sin(q1 + pi/4)*sin(q2)*cos(q3) - self.L3*dq3*sin(q3)*cos(q1 + pi/4) - self.L4*dq1*sin(q1 + pi/4)*sin(q4)*cos(q3) + self.L4*dq1*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4) + self.L4*dq2*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q2) + self.L4*dq3*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq3*sin(q3)*sin(q4)*cos(q1 + pi/4) + self.L4*dq4*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4) + self.L4*dq4*cos(q1 + pi/4)*cos(q3)*cos(q4)),
            0.707106781186548*sqrt(2)*self.L4*(-dq1*sin(q1 + pi/4)*sin(q3)*cos(q4) - dq1*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4) - dq1*sin(q4)*cos(q1 + pi/4)*cos(q2) + dq2*sin(q1 + pi/4)*sin(q2)*sin(q4) - dq2*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4) + dq3*cos(q1 + pi/4)*cos(q3)*cos(q4) + dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - dq4*sin(q1 + pi/4)*cos(q2)*cos(q4) - dq4*sin(q3)*sin(q4)*cos(q1 + pi/4)),
            0,
            0,
            0
        ],
        [  # row 1
            0.707106781186548*sqrt(2)*(self.L1*dq1*cos(q1 + pi/4) + self.L2*dq1*cos(q1 + pi/4)*cos(q2) - self.L2*dq2*sin(q1 + pi/4)*sin(q2) - self.L3*dq1*sin(q1 + pi/4)*sin(q3) - self.L3*dq1*sin(q2)*cos(q1 + pi/4)*cos(q3) - self.L3*dq2*sin(q1 + pi/4)*cos(q2)*cos(q3) + self.L3*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3) + self.L3*dq3*cos(q1 + pi/4)*cos(q3) - self.L4*dq1*sin(q1 + pi/4)*sin(q3)*sin(q4) - self.L4*dq1*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq1*cos(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq2*sin(q1 + pi/4)*sin(q2)*cos(q4) - self.L4*dq2*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3) + self.L4*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4) + self.L4*dq3*sin(q4)*cos(q1 + pi/4)*cos(q3) - self.L4*dq4*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4) - self.L4*dq4*sin(q1 + pi/4)*sin(q4)*cos(q2) + self.L4*dq4*sin(q3)*cos(q1 + pi/4)*cos(q4)),
            0.707106781186548*sqrt(2)*(-self.L2*dq1*sin(q1 + pi/4)*sin(q2) + self.L2*dq2*cos(q1 + pi/4)*cos(q2) - self.L3*dq1*sin(q1 + pi/4)*cos(q2)*cos(q3) - self.L3*dq2*sin(q2)*cos(q1 + pi/4)*cos(q3) - self.L3*dq3*sin(q3)*cos(q1 + pi/4)*cos(q2) - self.L4*dq1*sin(q1 + pi/4)*sin(q2)*cos(q4) - self.L4*dq1*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3) - self.L4*dq2*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq2*cos(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq3*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q2) - self.L4*dq4*sin(q2)*sin(q4)*cos(q1 + pi/4) + self.L4*dq4*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)),
            0.707106781186548*sqrt(2)*(self.L3*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3) + self.L3*dq1*cos(q1 + pi/4)*cos(q3) - self.L3*dq2*sin(q3)*cos(q1 + pi/4)*cos(q2) - self.L3*dq3*sin(q1 + pi/4)*sin(q3) - self.L3*dq3*sin(q2)*cos(q1 + pi/4)*cos(q3) + self.L4*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4) + self.L4*dq1*sin(q4)*cos(q1 + pi/4)*cos(q3) - self.L4*dq2*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q2) - self.L4*dq3*sin(q1 + pi/4)*sin(q3)*sin(q4) - self.L4*dq3*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq4*sin(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq4*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4)),
            0.707106781186548*sqrt(2)*self.L4*(-dq1*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4) - dq1*sin(q1 + pi/4)*sin(q4)*cos(q2) + dq1*sin(q3)*cos(q1 + pi/4)*cos(q4) - dq2*sin(q2)*sin(q4)*cos(q1 + pi/4) + dq2*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + dq3*sin(q1 + pi/4)*cos(q3)*cos(q4) - dq3*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4) - dq4*sin(q1 + pi/4)*sin(q3)*sin(q4) - dq4*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + dq4*cos(q1 + pi/4)*cos(q2)*cos(q4)),
            0,
            0,
            0
        ],
        [  # row 2
            0,
            self.L2*dq2*sin(q2) + self.L3*dq2*cos(q2)*cos(q3) - self.L3*dq3*sin(q2)*sin(q3) + self.L4*(dq2*sin(q2)*cos(q4) + dq2*sin(q4)*cos(q2)*cos(q3) - dq3*sin(q2)*sin(q3)*sin(q4) + dq4*sin(q2)*cos(q3)*cos(q4) + dq4*sin(q4)*cos(q2)),
            -self.L3*dq2*sin(q2)*sin(q3) + self.L3*dq3*cos(q2)*cos(q3) - self.L4*dq2*sin(q2)*sin(q3)*sin(q4) + self.L4*dq3*sin(q4)*cos(q2)*cos(q3) + self.L4*dq4*sin(q3)*cos(q2)*cos(q4),
            self.L4*(dq2*sin(q2)*cos(q3)*cos(q4) + dq2*sin(q4)*cos(q2) + dq3*sin(q3)*cos(q2)*cos(q4) + dq4*sin(q2)*cos(q4) + dq4*sin(q4)*cos(q2)*cos(q3)),
            0,
            0,
            0
        ]
    ])
    return z
def djacobi_6(self, q, dq):
    """Time derivative of the (3, 7) translational Jacobian for link/frame 6.

    Parameters
    ----------
    q : (7, 1) array-like
        Joint positions; only q1..q5 appear in the expressions below.
    dq : (7, 1) array-like
        Joint velocities; only dq1..dq5 appear in the expressions below.

    Returns
    -------
    numpy.ndarray
        dJ/dt as a (3, 7) matrix. Columns 6-7 are identically zero,
        consistent with joints 6-7 not moving this point.

    NOTE(review): machine-generated symbolic output; do not hand-edit
    individual terms. 0.707106781186548*sqrt(2) is numerically ~1.0 but
    is kept verbatim as emitted by the generator; the `q1 + pi/4` terms
    presumably encode a fixed 45-degree base offset — confirm against
    the robot's DH parameters.
    """
    q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
    dq1, dq2, dq3, dq4, dq5, dq6, dq7 = dq[0, 0], dq[1, 0], dq[2, 0], dq[3, 0], dq[4, 0], dq[5, 0], dq[6, 0]
    z = np.array([
        [  # row 0
            0.707106781186548*sqrt(2)*(-self.L1*dq1*sin(q1 + pi/4) - self.L2*dq1*sin(q1 + pi/4)*cos(q2) - self.L2*dq2*sin(q2)*cos(q1 + pi/4) + self.L3*dq1*sin(q1 + pi/4)*sin(q2)*cos(q3) - self.L3*dq1*sin(q3)*cos(q1 + pi/4) - self.L3*dq2*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L3*dq3*sin(q1 + pi/4)*cos(q3) + self.L3*dq3*sin(q2)*sin(q3)*cos(q1 + pi/4) + self.L4*dq1*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq1*sin(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq1*sin(q3)*sin(q4)*cos(q1 + pi/4) - self.L4*dq2*sin(q2)*cos(q1 + pi/4)*cos(q4) - self.L4*dq2*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L4*dq3*sin(q1 + pi/4)*sin(q4)*cos(q3) + self.L4*dq3*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4) - self.L4*dq4*sin(q1 + pi/4)*sin(q3)*cos(q4) - self.L4*dq4*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq4*sin(q4)*cos(q1 + pi/4)*cos(q2) - self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq1*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5) - self.L5*dq1*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq1*sin(q5)*cos(q1 + pi/4)*cos(q3) + self.L5*dq2*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q5) + self.L5*dq2*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q2) - self.L5*dq2*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q3)*sin(q5) - self.L5*dq3*sin(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq3*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) + self.L5*dq3*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3) + self.L5*dq4*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q5) + self.L5*dq4*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5) - self.L5*dq4*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q4) - self.L5*dq5*sin(q1 + pi/4)*cos(q3)*cos(q5) + self.L5*dq5*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q5) + self.L5*dq5*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4) + self.L5*dq5*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q2)),
            0.707106781186548*sqrt(2)*(-self.L2*dq1*sin(q2)*cos(q1 + pi/4) - self.L2*dq2*sin(q1 + pi/4)*cos(q2) - self.L3*dq1*cos(q1 + pi/4)*cos(q2)*cos(q3) + self.L3*dq2*sin(q1 + pi/4)*sin(q2)*cos(q3) + self.L3*dq3*sin(q1 + pi/4)*sin(q3)*cos(q2) - self.L4*dq1*sin(q2)*cos(q1 + pi/4)*cos(q4) - self.L4*dq1*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3) + self.L4*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq2*sin(q1 + pi/4)*cos(q2)*cos(q4) + self.L4*dq3*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q2) + self.L4*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4) - self.L4*dq4*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + self.L5*dq1*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q5) + self.L5*dq1*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q2) - self.L5*dq1*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq2*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q3)*cos(q2)*cos(q4)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q3) + self.L5*dq4*sin(q1 + pi/4)*sin(q2)*cos(q4)*cos(q5) + self.L5*dq4*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q3)*cos(q2)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q3)*cos(q4)),
            0.707106781186548*sqrt(2)*(-self.L3*dq1*sin(q1 + pi/4)*cos(q3) + self.L3*dq1*sin(q2)*sin(q3)*cos(q1 + pi/4) + self.L3*dq2*sin(q1 + pi/4)*sin(q3)*cos(q2) + self.L3*dq3*sin(q1 + pi/4)*sin(q2)*cos(q3) - self.L3*dq3*sin(q3)*cos(q1 + pi/4) - self.L4*dq1*sin(q1 + pi/4)*sin(q4)*cos(q3) + self.L4*dq1*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4) + self.L4*dq2*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q2) + self.L4*dq3*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq3*sin(q3)*sin(q4)*cos(q1 + pi/4) + self.L4*dq4*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4) + self.L4*dq4*cos(q1 + pi/4)*cos(q3)*cos(q4) + self.L5*dq1*sin(q1 + pi/4)*sin(q3)*sin(q5) - self.L5*dq1*sin(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq1*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) + self.L5*dq1*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3) + self.L5*dq2*sin(q1 + pi/4)*sin(q3)*cos(q2)*cos(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q3) - self.L5*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq3*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq3*sin(q5)*cos(q1 + pi/4)*cos(q3) - self.L5*dq4*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4)*cos(q5) - self.L5*dq4*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*cos(q4) + self.L5*dq5*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q5) - self.L5*dq5*sin(q3)*cos(q1 + pi/4)*cos(q5) - self.L5*dq5*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4)),
            0.707106781186548*sqrt(2)*(-self.L4*dq1*sin(q1 + pi/4)*sin(q3)*cos(q4) - self.L4*dq1*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq1*sin(q4)*cos(q1 + pi/4)*cos(q2) + self.L4*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4) - self.L4*dq2*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + self.L4*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4) + self.L4*dq3*cos(q1 + pi/4)*cos(q3)*cos(q4) + self.L4*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq4*sin(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq4*sin(q3)*sin(q4)*cos(q1 + pi/4) + self.L5*dq1*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q5) + self.L5*dq1*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5) - self.L5*dq1*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q2)*cos(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3)*cos(q5) - self.L5*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4)*cos(q5) - self.L5*dq3*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5) + self.L5*dq4*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq4*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5) - self.L5*dq4*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5)*cos(q3) + self.L5*dq5*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q4) + self.L5*dq5*sin(q3)*sin(q4)*sin(q5)*cos(q1 + pi/4)),
            0.707106781186548*sqrt(2)*self.L5*(dq1*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q4) - dq1*sin(q1 + pi/4)*cos(q3)*cos(q5) + dq1*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q5) + dq1*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4) + dq1*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q2) - dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5) + dq2*sin(q1 + pi/4)*sin(q3)*cos(q2)*cos(q5) + dq2*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q3)*cos(q4) - dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*cos(q4) + dq3*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q5) - dq3*sin(q3)*cos(q1 + pi/4)*cos(q5) - dq3*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4) - dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5)*cos(q3) + dq4*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q4) + dq4*sin(q3)*sin(q4)*sin(q5)*cos(q1 + pi/4) - dq5*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5) + dq5*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) + dq5*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5) - dq5*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) - dq5*sin(q5)*cos(q1 + pi/4)*cos(q3)),
            0,
            0
        ],
        [  # row 1
            0.707106781186548*sqrt(2)*(self.L1*dq1*cos(q1 + pi/4) + self.L2*dq1*cos(q1 + pi/4)*cos(q2) - self.L2*dq2*sin(q1 + pi/4)*sin(q2) - self.L3*dq1*sin(q1 + pi/4)*sin(q3) - self.L3*dq1*sin(q2)*cos(q1 + pi/4)*cos(q3) - self.L3*dq2*sin(q1 + pi/4)*cos(q2)*cos(q3) + self.L3*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3) + self.L3*dq3*cos(q1 + pi/4)*cos(q3) - self.L4*dq1*sin(q1 + pi/4)*sin(q3)*sin(q4) - self.L4*dq1*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq1*cos(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq2*sin(q1 + pi/4)*sin(q2)*cos(q4) - self.L4*dq2*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3) + self.L4*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4) + self.L4*dq3*sin(q4)*cos(q1 + pi/4)*cos(q3) - self.L4*dq4*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4) - self.L4*dq4*sin(q1 + pi/4)*sin(q4)*cos(q2) + self.L4*dq4*sin(q3)*cos(q1 + pi/4)*cos(q4) - self.L5*dq1*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5) - self.L5*dq1*sin(q1 + pi/4)*sin(q5)*cos(q3) + self.L5*dq1*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4) - self.L5*dq1*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq1*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q2) - self.L5*dq2*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3) - self.L5*dq3*sin(q3)*sin(q5)*cos(q1 + pi/4) + self.L5*dq3*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3)*cos(q5) - self.L5*dq4*sin(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L5*dq4*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3)*cos(q4) + self.L5*dq5*sin(q1 + pi/4)*sin(q4)*sin(q5)*cos(q2) - self.L5*dq5*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4) + self.L5*dq5*cos(q1 + pi/4)*cos(q3)*cos(q5)),
            0.707106781186548*sqrt(2)*(-self.L2*dq1*sin(q1 + pi/4)*sin(q2) + self.L2*dq2*cos(q1 + pi/4)*cos(q2) - self.L3*dq1*sin(q1 + pi/4)*cos(q2)*cos(q3) - self.L3*dq2*sin(q2)*cos(q1 + pi/4)*cos(q3) - self.L3*dq3*sin(q3)*cos(q1 + pi/4)*cos(q2) - self.L4*dq1*sin(q1 + pi/4)*sin(q2)*cos(q4) - self.L4*dq1*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3) - self.L4*dq2*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq2*cos(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq3*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q2) - self.L4*dq4*sin(q2)*sin(q4)*cos(q1 + pi/4) + self.L4*dq4*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q5) + self.L5*dq1*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q2) - self.L5*dq1*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq2*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4) - self.L5*dq2*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq2*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5) - self.L5*dq3*sin(q3)*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L5*dq3*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L5*dq4*sin(q2)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq4*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q5) + self.L5*dq5*sin(q2)*sin(q4)*sin(q5)*cos(q1 + pi/4) - self.L5*dq5*sin(q3)*cos(q1 + pi/4)*cos(q2)*cos(q5) - self.L5*dq5*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)),
            0.707106781186548*sqrt(2)*(self.L3*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3) + self.L3*dq1*cos(q1 + pi/4)*cos(q3) - self.L3*dq2*sin(q3)*cos(q1 + pi/4)*cos(q2) - self.L3*dq3*sin(q1 + pi/4)*sin(q3) - self.L3*dq3*sin(q2)*cos(q1 + pi/4)*cos(q3) + self.L4*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4) + self.L4*dq1*sin(q4)*cos(q1 + pi/4)*cos(q3) - self.L4*dq2*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q2) - self.L4*dq3*sin(q1 + pi/4)*sin(q3)*sin(q4) - self.L4*dq3*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq4*sin(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq4*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4)*cos(q5) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3) - self.L5*dq1*sin(q3)*sin(q5)*cos(q1 + pi/4) + self.L5*dq1*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq2*sin(q3)*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L5*dq2*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L5*dq3*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5) - self.L5*dq3*sin(q1 + pi/4)*sin(q5)*cos(q3) + self.L5*dq3*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4) - self.L5*dq3*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq4*sin(q1 + pi/4)*sin(q4)*cos(q3)*cos(q5) + self.L5*dq4*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q3)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q5)*cos(q3)*cos(q4) + self.L5*dq5*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4) - self.L5*dq5*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q5)),
            0.707106781186548*sqrt(2)*(-self.L4*dq1*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4) - self.L4*dq1*sin(q1 + pi/4)*sin(q4)*cos(q2) + self.L4*dq1*sin(q3)*cos(q1 + pi/4)*cos(q4) - self.L4*dq2*sin(q2)*sin(q4)*cos(q1 + pi/4) + self.L4*dq2*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + self.L4*dq3*sin(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq3*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4) - self.L4*dq4*sin(q1 + pi/4)*sin(q3)*sin(q4) - self.L4*dq4*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq4*cos(q1 + pi/4)*cos(q2)*cos(q4) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3)*cos(q5) - self.L5*dq1*sin(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L5*dq1*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5) - self.L5*dq2*sin(q2)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq2*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q5) - self.L5*dq3*sin(q1 + pi/4)*sin(q4)*cos(q3)*cos(q5) + self.L5*dq3*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5) - self.L5*dq4*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5) - self.L5*dq4*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq4*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q3)*sin(q4)*sin(q5) + self.L5*dq5*sin(q2)*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q3) - self.L5*dq5*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q4)),
            0.707106781186548*sqrt(2)*self.L5*(dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q5) + dq1*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3)*cos(q4) + dq1*sin(q1 + pi/4)*sin(q4)*sin(q5)*cos(q2) - dq1*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4) + dq1*cos(q1 + pi/4)*cos(q3)*cos(q5) + dq2*sin(q2)*sin(q4)*sin(q5)*cos(q1 + pi/4) - dq2*sin(q3)*cos(q1 + pi/4)*cos(q2)*cos(q5) - dq2*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) - dq3*sin(q1 + pi/4)*sin(q3)*cos(q5) - dq3*sin(q1 + pi/4)*sin(q5)*cos(q3)*cos(q4) + dq3*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4) - dq3*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q5) + dq4*sin(q1 + pi/4)*sin(q3)*sin(q4)*sin(q5) + dq4*sin(q2)*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q3) - dq4*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q4) - dq5*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5) - dq5*sin(q1 + pi/4)*sin(q5)*cos(q3) + dq5*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4) - dq5*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - dq5*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5)),
            0,
            0
        ],
        [  # row 2
            0,
            self.L2*dq2*sin(q2) + self.L3*dq2*cos(q2)*cos(q3) - self.L3*dq3*sin(q2)*sin(q3) + self.L4*(dq2*sin(q2)*cos(q4) + dq2*sin(q4)*cos(q2)*cos(q3) - dq3*sin(q2)*sin(q3)*sin(q4) + dq4*sin(q2)*cos(q3)*cos(q4) + dq4*sin(q4)*cos(q2)) - self.L5*((sin(q2)*cos(q3)*cos(q4) + sin(q4)*cos(q2))*dq5*sin(q5) + (dq2*sin(q2)*sin(q4) - dq2*cos(q2)*cos(q3)*cos(q4) + dq3*sin(q2)*sin(q3)*cos(q4) + dq4*sin(q2)*sin(q4)*cos(q3) - dq4*cos(q2)*cos(q4))*cos(q5) + dq2*sin(q3)*sin(q5)*cos(q2) + dq3*sin(q2)*sin(q5)*cos(q3) + dq5*sin(q2)*sin(q3)*cos(q5)),
            -self.L3*dq2*sin(q2)*sin(q3) + self.L3*dq3*cos(q2)*cos(q3) - self.L4*dq2*sin(q2)*sin(q3)*sin(q4) + self.L4*dq3*sin(q4)*cos(q2)*cos(q3) + self.L4*dq4*sin(q3)*cos(q2)*cos(q4) - self.L5*(dq2*sin(q2)*sin(q3)*cos(q4)*cos(q5) + dq2*sin(q2)*sin(q5)*cos(q3) + dq3*sin(q3)*sin(q5)*cos(q2) - dq3*cos(q2)*cos(q3)*cos(q4)*cos(q5) + dq4*sin(q3)*sin(q4)*cos(q2)*cos(q5) + dq5*sin(q3)*sin(q5)*cos(q2)*cos(q4) - dq5*cos(q2)*cos(q3)*cos(q5)),
            self.L4*(dq2*sin(q2)*cos(q3)*cos(q4) + dq2*sin(q4)*cos(q2) + dq3*sin(q3)*cos(q2)*cos(q4) + dq4*sin(q2)*cos(q4) + dq4*sin(q4)*cos(q2)*cos(q3)) - self.L5*(sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3))*dq5*sin(q5) - self.L5*(dq2*sin(q2)*sin(q4)*cos(q3) - dq2*cos(q2)*cos(q4) + dq3*sin(q3)*sin(q4)*cos(q2) + dq4*sin(q2)*sin(q4) - dq4*cos(q2)*cos(q3)*cos(q4))*cos(q5),
            -self.L5*((sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*dq5*cos(q5) + (dq2*sin(q2)*cos(q3)*cos(q4) + dq2*sin(q4)*cos(q2) + dq3*sin(q3)*cos(q2)*cos(q4) + dq4*sin(q2)*cos(q4) + dq4*sin(q4)*cos(q2)*cos(q3))*sin(q5) + dq2*sin(q2)*sin(q3)*cos(q5) - dq3*cos(q2)*cos(q3)*cos(q5) + dq5*sin(q3)*sin(q5)*cos(q2)),
            0,
            0
        ]
    ])
    return z
def djacobi_7(self, q, dq: np.ndarray) -> np.ndarray:
    """Return the time derivative dJ/dt of the (translational) Jacobian for frame 7.

    The entries are closed-form symbolic expressions, clearly auto-generated
    (presumably exported from a CAS such as SymPy) — do NOT edit the terms by
    hand; regenerate them from the symbolic model instead.

    Parameters
    ----------
    q : (7, 1) ndarray
        Joint positions; indexed as ``q[i, 0]``, so a column vector is expected.
    dq : (7, 1) ndarray
        Joint velocities, same layout as ``q``.

    Returns
    -------
    np.ndarray
        A 3x7 matrix: the rows visible here cover the linear-velocity part of
        dJ/dt only. Columns 6 and 7 are identically zero and q6/q7, dq6/dq7
        never appear in the expressions — presumably because this frame's
        position does not depend on the last two joints (TODO confirm against
        the generating model).

    Notes
    -----
    * ``sin``, ``cos``, ``sqrt`` and ``pi`` are assumed to be in scope from a
      file-level import (e.g. ``from math import ...``) — not visible here.
    * The recurring factor ``0.707106781186548*sqrt(2)`` is numerically ~1;
      it stems from the pi/4 base-frame offset and was left unsimplified by
      the code generator. Left as generated to keep the output byte-exact.
    """
    # Unpack joint positions and velocities from (7,1) column vectors.
    q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
    dq1, dq2, dq3, dq4, dq5, dq6, dq7 = dq[0, 0], dq[1, 0], dq[2, 0], dq[3, 0], dq[4, 0], dq[5, 0], dq[6, 0]
    # 3x7 dJ/dt matrix; rows correspond to x, y, z translational components.
    z = np.array([
        [
            # Row 1 (x): columns 1-5; columns 6-7 are zero.
            0.707106781186548*sqrt(2)*(-self.L1*dq1*sin(q1 + pi/4) - self.L2*dq1*sin(q1 + pi/4)*cos(q2) - self.L2*dq2*sin(q2)*cos(q1 + pi/4) + self.L3*dq1*sin(q1 + pi/4)*sin(q2)*cos(q3) - self.L3*dq1*sin(q3)*cos(q1 + pi/4) - self.L3*dq2*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L3*dq3*sin(q1 + pi/4)*cos(q3) + self.L3*dq3*sin(q2)*sin(q3)*cos(q1 + pi/4) + self.L4*dq1*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq1*sin(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq1*sin(q3)*sin(q4)*cos(q1 + pi/4) - self.L4*dq2*sin(q2)*cos(q1 + pi/4)*cos(q4) - self.L4*dq2*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L4*dq3*sin(q1 + pi/4)*sin(q4)*cos(q3) + self.L4*dq3*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4) - self.L4*dq4*sin(q1 + pi/4)*sin(q3)*cos(q4) - self.L4*dq4*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq4*sin(q4)*cos(q1 + pi/4)*cos(q2) - self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq1*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5) - self.L5*dq1*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq1*sin(q5)*cos(q1 + pi/4)*cos(q3) + self.L5*dq2*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q5) + self.L5*dq2*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q2) - self.L5*dq2*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q3)*sin(q5) - self.L5*dq3*sin(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq3*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) + self.L5*dq3*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3) + self.L5*dq4*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q5) + self.L5*dq4*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5) - self.L5*dq4*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q4) - self.L5*dq5*sin(q1 + pi/4)*cos(q3)*cos(q5) + self.L5*dq5*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q5) + self.L5*dq5*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4) + self.L5*dq5*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q2)),
            0.707106781186548*sqrt(2)*(-self.L2*dq1*sin(q2)*cos(q1 + pi/4) - self.L2*dq2*sin(q1 + pi/4)*cos(q2) - self.L3*dq1*cos(q1 + pi/4)*cos(q2)*cos(q3) + self.L3*dq2*sin(q1 + pi/4)*sin(q2)*cos(q3) + self.L3*dq3*sin(q1 + pi/4)*sin(q3)*cos(q2) - self.L4*dq1*sin(q2)*cos(q1 + pi/4)*cos(q4) - self.L4*dq1*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3) + self.L4*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq2*sin(q1 + pi/4)*cos(q2)*cos(q4) + self.L4*dq3*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q2) + self.L4*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4) - self.L4*dq4*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + self.L5*dq1*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q5) + self.L5*dq1*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q2) - self.L5*dq1*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq2*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q3)*cos(q2)*cos(q4)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q3) + self.L5*dq4*sin(q1 + pi/4)*sin(q2)*cos(q4)*cos(q5) + self.L5*dq4*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q3)*cos(q2)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q3)*cos(q4)),
            0.707106781186548*sqrt(2)*(-self.L3*dq1*sin(q1 + pi/4)*cos(q3) + self.L3*dq1*sin(q2)*sin(q3)*cos(q1 + pi/4) + self.L3*dq2*sin(q1 + pi/4)*sin(q3)*cos(q2) + self.L3*dq3*sin(q1 + pi/4)*sin(q2)*cos(q3) - self.L3*dq3*sin(q3)*cos(q1 + pi/4) - self.L4*dq1*sin(q1 + pi/4)*sin(q4)*cos(q3) + self.L4*dq1*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4) + self.L4*dq2*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q2) + self.L4*dq3*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq3*sin(q3)*sin(q4)*cos(q1 + pi/4) + self.L4*dq4*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4) + self.L4*dq4*cos(q1 + pi/4)*cos(q3)*cos(q4) + self.L5*dq1*sin(q1 + pi/4)*sin(q3)*sin(q5) - self.L5*dq1*sin(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq1*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) + self.L5*dq1*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3) + self.L5*dq2*sin(q1 + pi/4)*sin(q3)*cos(q2)*cos(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q3) - self.L5*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq3*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq3*sin(q5)*cos(q1 + pi/4)*cos(q3) - self.L5*dq4*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4)*cos(q5) - self.L5*dq4*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*cos(q4) + self.L5*dq5*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q5) - self.L5*dq5*sin(q3)*cos(q1 + pi/4)*cos(q5) - self.L5*dq5*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4)),
            0.707106781186548*sqrt(2)*(-self.L4*dq1*sin(q1 + pi/4)*sin(q3)*cos(q4) - self.L4*dq1*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq1*sin(q4)*cos(q1 + pi/4)*cos(q2) + self.L4*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4) - self.L4*dq2*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + self.L4*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4) + self.L4*dq3*cos(q1 + pi/4)*cos(q3)*cos(q4) + self.L4*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq4*sin(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq4*sin(q3)*sin(q4)*cos(q1 + pi/4) + self.L5*dq1*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q5) + self.L5*dq1*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5) - self.L5*dq1*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q2)*cos(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3)*cos(q5) - self.L5*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4)*cos(q5) - self.L5*dq3*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5) + self.L5*dq4*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq4*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5) - self.L5*dq4*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5)*cos(q3) + self.L5*dq5*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q4) + self.L5*dq5*sin(q3)*sin(q4)*sin(q5)*cos(q1 + pi/4)),
            0.707106781186548*sqrt(2)*self.L5*(dq1*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q4) - dq1*sin(q1 + pi/4)*cos(q3)*cos(q5) + dq1*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q5) + dq1*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4) + dq1*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q2) - dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5) + dq2*sin(q1 + pi/4)*sin(q3)*cos(q2)*cos(q5) + dq2*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q3)*cos(q4) - dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*cos(q4) + dq3*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q5) - dq3*sin(q3)*cos(q1 + pi/4)*cos(q5) - dq3*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4) - dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5)*cos(q3) + dq4*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q4) + dq4*sin(q3)*sin(q4)*sin(q5)*cos(q1 + pi/4) - dq5*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5) + dq5*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) + dq5*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5) - dq5*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) - dq5*sin(q5)*cos(q1 + pi/4)*cos(q3)),
            0,
            0
        ],
        [
            # Row 2 (y): same term structure as row 1 with the q1+pi/4
            # sin/cos roles swapped, consistent with a base-frame rotation.
            0.707106781186548*sqrt(2)*(self.L1*dq1*cos(q1 + pi/4) + self.L2*dq1*cos(q1 + pi/4)*cos(q2) - self.L2*dq2*sin(q1 + pi/4)*sin(q2) - self.L3*dq1*sin(q1 + pi/4)*sin(q3) - self.L3*dq1*sin(q2)*cos(q1 + pi/4)*cos(q3) - self.L3*dq2*sin(q1 + pi/4)*cos(q2)*cos(q3) + self.L3*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3) + self.L3*dq3*cos(q1 + pi/4)*cos(q3) - self.L4*dq1*sin(q1 + pi/4)*sin(q3)*sin(q4) - self.L4*dq1*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq1*cos(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq2*sin(q1 + pi/4)*sin(q2)*cos(q4) - self.L4*dq2*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3) + self.L4*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4) + self.L4*dq3*sin(q4)*cos(q1 + pi/4)*cos(q3) - self.L4*dq4*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4) - self.L4*dq4*sin(q1 + pi/4)*sin(q4)*cos(q2) + self.L4*dq4*sin(q3)*cos(q1 + pi/4)*cos(q4) - self.L5*dq1*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5) - self.L5*dq1*sin(q1 + pi/4)*sin(q5)*cos(q3) + self.L5*dq1*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4) - self.L5*dq1*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq1*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q2) - self.L5*dq2*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3) - self.L5*dq3*sin(q3)*sin(q5)*cos(q1 + pi/4) + self.L5*dq3*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3)*cos(q5) - self.L5*dq4*sin(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L5*dq4*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3)*cos(q4) + self.L5*dq5*sin(q1 + pi/4)*sin(q4)*sin(q5)*cos(q2) - self.L5*dq5*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4) + self.L5*dq5*cos(q1 + pi/4)*cos(q3)*cos(q5)),
            0.707106781186548*sqrt(2)*(-self.L2*dq1*sin(q1 + pi/4)*sin(q2) + self.L2*dq2*cos(q1 + pi/4)*cos(q2) - self.L3*dq1*sin(q1 + pi/4)*cos(q2)*cos(q3) - self.L3*dq2*sin(q2)*cos(q1 + pi/4)*cos(q3) - self.L3*dq3*sin(q3)*cos(q1 + pi/4)*cos(q2) - self.L4*dq1*sin(q1 + pi/4)*sin(q2)*cos(q4) - self.L4*dq1*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3) - self.L4*dq2*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq2*cos(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq3*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q2) - self.L4*dq4*sin(q2)*sin(q4)*cos(q1 + pi/4) + self.L4*dq4*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q5) + self.L5*dq1*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q2) - self.L5*dq1*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq2*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4) - self.L5*dq2*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq2*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5) - self.L5*dq3*sin(q3)*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L5*dq3*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L5*dq4*sin(q2)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq4*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q5) + self.L5*dq5*sin(q2)*sin(q4)*sin(q5)*cos(q1 + pi/4) - self.L5*dq5*sin(q3)*cos(q1 + pi/4)*cos(q2)*cos(q5) - self.L5*dq5*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)),
            0.707106781186548*sqrt(2)*(self.L3*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3) + self.L3*dq1*cos(q1 + pi/4)*cos(q3) - self.L3*dq2*sin(q3)*cos(q1 + pi/4)*cos(q2) - self.L3*dq3*sin(q1 + pi/4)*sin(q3) - self.L3*dq3*sin(q2)*cos(q1 + pi/4)*cos(q3) + self.L4*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4) + self.L4*dq1*sin(q4)*cos(q1 + pi/4)*cos(q3) - self.L4*dq2*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q2) - self.L4*dq3*sin(q1 + pi/4)*sin(q3)*sin(q4) - self.L4*dq3*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq4*sin(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq4*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4)*cos(q5) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3) - self.L5*dq1*sin(q3)*sin(q5)*cos(q1 + pi/4) + self.L5*dq1*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq2*sin(q3)*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L5*dq2*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L5*dq3*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5) - self.L5*dq3*sin(q1 + pi/4)*sin(q5)*cos(q3) + self.L5*dq3*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4) - self.L5*dq3*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq4*sin(q1 + pi/4)*sin(q4)*cos(q3)*cos(q5) + self.L5*dq4*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q3)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q5)*cos(q3)*cos(q4) + self.L5*dq5*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4) - self.L5*dq5*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q5)),
            0.707106781186548*sqrt(2)*(-self.L4*dq1*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4) - self.L4*dq1*sin(q1 + pi/4)*sin(q4)*cos(q2) + self.L4*dq1*sin(q3)*cos(q1 + pi/4)*cos(q4) - self.L4*dq2*sin(q2)*sin(q4)*cos(q1 + pi/4) + self.L4*dq2*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + self.L4*dq3*sin(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq3*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4) - self.L4*dq4*sin(q1 + pi/4)*sin(q3)*sin(q4) - self.L4*dq4*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq4*cos(q1 + pi/4)*cos(q2)*cos(q4) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3)*cos(q5) - self.L5*dq1*sin(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L5*dq1*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5) - self.L5*dq2*sin(q2)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq2*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q5) - self.L5*dq3*sin(q1 + pi/4)*sin(q4)*cos(q3)*cos(q5) + self.L5*dq3*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5) - self.L5*dq4*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5) - self.L5*dq4*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq4*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q3)*sin(q4)*sin(q5) + self.L5*dq5*sin(q2)*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q3) - self.L5*dq5*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q4)),
            0.707106781186548*sqrt(2)*self.L5*(dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q5) + dq1*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3)*cos(q4) + dq1*sin(q1 + pi/4)*sin(q4)*sin(q5)*cos(q2) - dq1*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4) + dq1*cos(q1 + pi/4)*cos(q3)*cos(q5) + dq2*sin(q2)*sin(q4)*sin(q5)*cos(q1 + pi/4) - dq2*sin(q3)*cos(q1 + pi/4)*cos(q2)*cos(q5) - dq2*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) - dq3*sin(q1 + pi/4)*sin(q3)*cos(q5) - dq3*sin(q1 + pi/4)*sin(q5)*cos(q3)*cos(q4) + dq3*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4) - dq3*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q5) + dq4*sin(q1 + pi/4)*sin(q3)*sin(q4)*sin(q5) + dq4*sin(q2)*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q3) - dq4*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q4) - dq5*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5) - dq5*sin(q1 + pi/4)*sin(q5)*cos(q3) + dq5*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4) - dq5*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - dq5*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5)),
            0,
            0
        ],
        [
            # Row 3 (z): independent of q1 (column 1 is zero), as expected
            # for a vertical revolute base axis -- TODO confirm.
            0,
            self.L2*dq2*sin(q2) + self.L3*dq2*cos(q2)*cos(q3) - self.L3*dq3*sin(q2)*sin(q3) + self.L4*(dq2*sin(q2)*cos(q4) + dq2*sin(q4)*cos(q2)*cos(q3) - dq3*sin(q2)*sin(q3)*sin(q4) + dq4*sin(q2)*cos(q3)*cos(q4) + dq4*sin(q4)*cos(q2)) - self.L5*((sin(q2)*cos(q3)*cos(q4) + sin(q4)*cos(q2))*dq5*sin(q5) + (dq2*sin(q2)*sin(q4) - dq2*cos(q2)*cos(q3)*cos(q4) + dq3*sin(q2)*sin(q3)*cos(q4) + dq4*sin(q2)*sin(q4)*cos(q3) - dq4*cos(q2)*cos(q4))*cos(q5) + dq2*sin(q3)*sin(q5)*cos(q2) + dq3*sin(q2)*sin(q5)*cos(q3) + dq5*sin(q2)*sin(q3)*cos(q5)),
            -self.L3*dq2*sin(q2)*sin(q3) + self.L3*dq3*cos(q2)*cos(q3) - self.L4*dq2*sin(q2)*sin(q3)*sin(q4) + self.L4*dq3*sin(q4)*cos(q2)*cos(q3) + self.L4*dq4*sin(q3)*cos(q2)*cos(q4) - self.L5*(dq2*sin(q2)*sin(q3)*cos(q4)*cos(q5) + dq2*sin(q2)*sin(q5)*cos(q3) + dq3*sin(q3)*sin(q5)*cos(q2) - dq3*cos(q2)*cos(q3)*cos(q4)*cos(q5) + dq4*sin(q3)*sin(q4)*cos(q2)*cos(q5) + dq5*sin(q3)*sin(q5)*cos(q2)*cos(q4) - dq5*cos(q2)*cos(q3)*cos(q5)),
            self.L4*(dq2*sin(q2)*cos(q3)*cos(q4) + dq2*sin(q4)*cos(q2) + dq3*sin(q3)*cos(q2)*cos(q4) + dq4*sin(q2)*cos(q4) + dq4*sin(q4)*cos(q2)*cos(q3)) - self.L5*(sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3))*dq5*sin(q5) - self.L5*(dq2*sin(q2)*sin(q4)*cos(q3) - dq2*cos(q2)*cos(q4) + dq3*sin(q3)*sin(q4)*cos(q2) + dq4*sin(q2)*sin(q4) - dq4*cos(q2)*cos(q3)*cos(q4))*cos(q5),
            -self.L5*((sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*dq5*cos(q5) + (dq2*sin(q2)*cos(q3)*cos(q4) + dq2*sin(q4)*cos(q2) + dq3*sin(q3)*cos(q2)*cos(q4) + dq4*sin(q2)*cos(q4) + dq4*sin(q4)*cos(q2)*cos(q3))*sin(q5) + dq2*sin(q2)*sin(q3)*cos(q5) - dq3*cos(q2)*cos(q3)*cos(q5) + dq5*sin(q3)*sin(q5)*cos(q2)),
            0,
            0
        ]
    ])
    return z
def djacobi_GL(self, q, dq):
q1, q2, q3, q4, q5, q6, q7 = q[0, 0], q[1, 0], q[2, 0], q[3, 0], q[4, 0], q[5, 0], q[6, 0]
dq1, dq2, dq3, dq4, dq5, dq6, dq7 = dq[0, 0], dq[1, 0], dq[2, 0], dq[3, 0], dq[4, 0], dq[5, 0], dq[6, 0]
z = np.array([
[
0.707106781186548*sqrt(2)*(-self.L1*dq1*sin(q1 + pi/4) - self.L2*dq1*sin(q1 + pi/4)*cos(q2) - self.L2*dq2*sin(q2)*cos(q1 + pi/4) + self.L3*dq1*sin(q1 + pi/4)*sin(q2)*cos(q3) - self.L3*dq1*sin(q3)*cos(q1 + pi/4) - self.L3*dq2*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L3*dq3*sin(q1 + pi/4)*cos(q3) + self.L3*dq3*sin(q2)*sin(q3)*cos(q1 + pi/4) + self.L4*dq1*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq1*sin(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq1*sin(q3)*sin(q4)*cos(q1 + pi/4) - self.L4*dq2*sin(q2)*cos(q1 + pi/4)*cos(q4) - self.L4*dq2*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L4*dq3*sin(q1 + pi/4)*sin(q4)*cos(q3) + self.L4*dq3*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4) - self.L4*dq4*sin(q1 + pi/4)*sin(q3)*cos(q4) - self.L4*dq4*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq4*sin(q4)*cos(q1 + pi/4)*cos(q2) - self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq1*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5) - self.L5*dq1*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq1*sin(q5)*cos(q1 + pi/4)*cos(q3) + self.L5*dq2*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q5) + self.L5*dq2*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q2) - self.L5*dq2*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q3)*sin(q5) - self.L5*dq3*sin(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq3*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) + self.L5*dq3*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3) + self.L5*dq4*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q5) + self.L5*dq4*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5) - self.L5*dq4*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q4) - self.L5*dq5*sin(q1 + pi/4)*cos(q3)*cos(q5) + self.L5*dq5*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q5) + self.L5*dq5*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4) + self.L5*dq5*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q2) - self.L6*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*sin(q6) + self.L6*dq1*sin(q1 + 
pi/4)*sin(q2)*sin(q4)*cos(q3)*cos(q6) + self.L6*dq1*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q3)*cos(q4)*cos(q5) + self.L6*dq1*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q2)*cos(q5) - self.L6*dq1*sin(q1 + pi/4)*cos(q2)*cos(q4)*cos(q6) - self.L6*dq1*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q6) - self.L6*dq1*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L6*dq1*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q3) + self.L6*dq2*sin(q2)*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q5) - self.L6*dq2*sin(q2)*cos(q1 + pi/4)*cos(q4)*cos(q6) + self.L6*dq2*sin(q3)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q2) - self.L6*dq2*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q6) - self.L6*dq2*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) + self.L6*dq3*sin(q1 + pi/4)*sin(q3)*sin(q5)*sin(q6) - self.L6*dq3*sin(q1 + pi/4)*sin(q4)*cos(q3)*cos(q6) - self.L6*dq3*sin(q1 + pi/4)*sin(q6)*cos(q3)*cos(q4)*cos(q5) + self.L6*dq3*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q6) + self.L6*dq3*sin(q2)*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q4)*cos(q5) + self.L6*dq3*sin(q2)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q3) + self.L6*dq4*sin(q1 + pi/4)*sin(q3)*sin(q4)*sin(q6)*cos(q5) - self.L6*dq4*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q6) + self.L6*dq4*sin(q2)*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q5) - self.L6*dq4*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q6) - self.L6*dq4*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q6) - self.L6*dq4*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) + self.L6*dq5*sin(q1 + pi/4)*sin(q3)*sin(q5)*sin(q6)*cos(q4) - self.L6*dq5*sin(q1 + pi/4)*sin(q6)*cos(q3)*cos(q5) + self.L6*dq5*sin(q2)*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q5) + self.L6*dq5*sin(q2)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4) + self.L6*dq5*sin(q4)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q2) + self.L6*dq6*sin(q1 + pi/4)*sin(q3)*sin(q4)*sin(q6) - self.L6*dq6*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5)*cos(q6) - self.L6*dq6*sin(q1 + pi/4)*sin(q5)*cos(q3)*cos(q6) + self.L6*dq6*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q6) + self.L6*dq6*sin(q2)*sin(q4)*sin(q6)*cos(q1 + 
pi/4)*cos(q3) - self.L6*dq6*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5)*cos(q6) - self.L6*dq6*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5)*cos(q6) - self.L6*dq6*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q4)),
0.707106781186548*sqrt(2)*(-self.L2*dq1*sin(q2)*cos(q1 + pi/4) - self.L2*dq2*sin(q1 + pi/4)*cos(q2) - self.L3*dq1*cos(q1 + pi/4)*cos(q2)*cos(q3) + self.L3*dq2*sin(q1 + pi/4)*sin(q2)*cos(q3) + self.L3*dq3*sin(q1 + pi/4)*sin(q3)*cos(q2) - self.L4*dq1*sin(q2)*cos(q1 + pi/4)*cos(q4) - self.L4*dq1*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3) + self.L4*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq2*sin(q1 + pi/4)*cos(q2)*cos(q4) + self.L4*dq3*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q2) + self.L4*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4) - self.L4*dq4*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + self.L5*dq1*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q5) + self.L5*dq1*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q2) - self.L5*dq1*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq2*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q3)*cos(q2)*cos(q4)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q3) + self.L5*dq4*sin(q1 + pi/4)*sin(q2)*cos(q4)*cos(q5) + self.L5*dq4*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q3)*cos(q2)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q3)*cos(q4) + self.L6*dq1*sin(q2)*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q5) - self.L6*dq1*sin(q2)*cos(q1 + pi/4)*cos(q4)*cos(q6) + self.L6*dq1*sin(q3)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q2) - self.L6*dq1*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q6) - self.L6*dq1*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) - self.L6*dq2*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*sin(q6) + self.L6*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3)*cos(q6) + self.L6*dq2*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q3)*cos(q4)*cos(q5) + self.L6*dq2*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q2)*cos(q5) - self.L6*dq2*sin(q1 + pi/4)*cos(q2)*cos(q4)*cos(q6) + self.L6*dq3*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q2)*cos(q6) + 
self.L6*dq3*sin(q1 + pi/4)*sin(q3)*sin(q6)*cos(q2)*cos(q4)*cos(q5) + self.L6*dq3*sin(q1 + pi/4)*sin(q5)*sin(q6)*cos(q2)*cos(q3) + self.L6*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q6) + self.L6*dq4*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q4)*cos(q5) + self.L6*dq4*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q2)*cos(q3)*cos(q5) - self.L6*dq4*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q6) - self.L6*dq5*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5)*sin(q6) + self.L6*dq5*sin(q1 + pi/4)*sin(q3)*sin(q6)*cos(q2)*cos(q5) + self.L6*dq5*sin(q1 + pi/4)*sin(q5)*sin(q6)*cos(q2)*cos(q3)*cos(q4) + self.L6*dq6*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q5)*cos(q6) + self.L6*dq6*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q4) + self.L6*dq6*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q2)*cos(q6) + self.L6*dq6*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q2)*cos(q3) - self.L6*dq6*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5)*cos(q6)),
0.707106781186548*sqrt(2)*(-self.L3*dq1*sin(q1 + pi/4)*cos(q3) + self.L3*dq1*sin(q2)*sin(q3)*cos(q1 + pi/4) + self.L3*dq2*sin(q1 + pi/4)*sin(q3)*cos(q2) + self.L3*dq3*sin(q1 + pi/4)*sin(q2)*cos(q3) - self.L3*dq3*sin(q3)*cos(q1 + pi/4) - self.L4*dq1*sin(q1 + pi/4)*sin(q4)*cos(q3) + self.L4*dq1*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4) + self.L4*dq2*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q2) + self.L4*dq3*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq3*sin(q3)*sin(q4)*cos(q1 + pi/4) + self.L4*dq4*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4) + self.L4*dq4*cos(q1 + pi/4)*cos(q3)*cos(q4) + self.L5*dq1*sin(q1 + pi/4)*sin(q3)*sin(q5) - self.L5*dq1*sin(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq1*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) + self.L5*dq1*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3) + self.L5*dq2*sin(q1 + pi/4)*sin(q3)*cos(q2)*cos(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q3) - self.L5*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq3*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq3*sin(q5)*cos(q1 + pi/4)*cos(q3) - self.L5*dq4*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4)*cos(q5) - self.L5*dq4*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*cos(q4) + self.L5*dq5*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q5) - self.L5*dq5*sin(q3)*cos(q1 + pi/4)*cos(q5) - self.L5*dq5*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4) + self.L6*dq1*sin(q1 + pi/4)*sin(q3)*sin(q5)*sin(q6) - self.L6*dq1*sin(q1 + pi/4)*sin(q4)*cos(q3)*cos(q6) - self.L6*dq1*sin(q1 + pi/4)*sin(q6)*cos(q3)*cos(q4)*cos(q5) + self.L6*dq1*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q6) + self.L6*dq1*sin(q2)*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q4)*cos(q5) + self.L6*dq1*sin(q2)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q3) + self.L6*dq2*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q2)*cos(q6) + self.L6*dq2*sin(q1 + pi/4)*sin(q3)*sin(q6)*cos(q2)*cos(q4)*cos(q5) + self.L6*dq2*sin(q1 + pi/4)*sin(q5)*sin(q6)*cos(q2)*cos(q3) - 
self.L6*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*sin(q6) + self.L6*dq3*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3)*cos(q6) + self.L6*dq3*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q3)*cos(q4)*cos(q5) - self.L6*dq3*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q6) - self.L6*dq3*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L6*dq3*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q3) - self.L6*dq4*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4)*sin(q6)*cos(q5) + self.L6*dq4*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4)*cos(q6) - self.L6*dq4*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q5) + self.L6*dq4*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q6) - self.L6*dq5*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*sin(q6)*cos(q4) + self.L6*dq5*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q3)*cos(q5) - self.L6*dq5*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q5) - self.L6*dq5*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4) - self.L6*dq6*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4)*sin(q6) + self.L6*dq6*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4)*cos(q5)*cos(q6) + self.L6*dq6*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3)*cos(q6) - self.L6*dq6*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q6) - self.L6*dq6*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q3) + self.L6*dq6*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5)*cos(q6)),
0.707106781186548*sqrt(2)*(-self.L4*dq1*sin(q1 + pi/4)*sin(q3)*cos(q4) - self.L4*dq1*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq1*sin(q4)*cos(q1 + pi/4)*cos(q2) + self.L4*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4) - self.L4*dq2*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + self.L4*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4) + self.L4*dq3*cos(q1 + pi/4)*cos(q3)*cos(q4) + self.L4*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3) - self.L4*dq4*sin(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq4*sin(q3)*sin(q4)*cos(q1 + pi/4) + self.L5*dq1*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q5) + self.L5*dq1*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5) - self.L5*dq1*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q2)*cos(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3)*cos(q5) - self.L5*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4)*cos(q5) - self.L5*dq3*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5) + self.L5*dq4*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq4*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5) - self.L5*dq4*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5)*cos(q3) + self.L5*dq5*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q4) + self.L5*dq5*sin(q3)*sin(q4)*sin(q5)*cos(q1 + pi/4) + self.L6*dq1*sin(q1 + pi/4)*sin(q3)*sin(q4)*sin(q6)*cos(q5) - self.L6*dq1*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q6) + self.L6*dq1*sin(q2)*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q5) - self.L6*dq1*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q6) - self.L6*dq1*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q6) - self.L6*dq1*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) + self.L6*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q6) + self.L6*dq2*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q4)*cos(q5) + self.L6*dq2*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q2)*cos(q3)*cos(q5) - self.L6*dq2*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q6) - self.L6*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4)*sin(q6)*cos(q5) + self.L6*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4)*cos(q6) - 
self.L6*dq3*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q5) + self.L6*dq3*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q6) + self.L6*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3)*cos(q6) + self.L6*dq4*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q3)*cos(q4)*cos(q5) + self.L6*dq4*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q2)*cos(q5) - self.L6*dq4*sin(q1 + pi/4)*cos(q2)*cos(q4)*cos(q6) - self.L6*dq4*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q6) - self.L6*dq4*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L6*dq5*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5)*sin(q6)*cos(q3) + self.L6*dq5*sin(q1 + pi/4)*sin(q5)*sin(q6)*cos(q2)*cos(q4) + self.L6*dq5*sin(q3)*sin(q4)*sin(q5)*sin(q6)*cos(q1 + pi/4) + self.L6*dq6*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3)*cos(q5)*cos(q6) + self.L6*dq6*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q3)*cos(q4) + self.L6*dq6*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q2) - self.L6*dq6*sin(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5)*cos(q6) - self.L6*dq6*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5)*cos(q6) - self.L6*dq6*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q4)),
0.707106781186548*sqrt(2)*(self.L5*dq1*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q4) - self.L5*dq1*sin(q1 + pi/4)*cos(q3)*cos(q5) + self.L5*dq1*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q5) + self.L5*dq1*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4) + self.L5*dq1*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q2) - self.L5*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q3)*cos(q2)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q3)*cos(q4) - self.L5*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*cos(q4) + self.L5*dq3*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q5) - self.L5*dq3*sin(q3)*cos(q1 + pi/4)*cos(q5) - self.L5*dq3*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4) - self.L5*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5)*cos(q3) + self.L5*dq4*sin(q1 + pi/4)*sin(q5)*cos(q2)*cos(q4) + self.L5*dq4*sin(q3)*sin(q4)*sin(q5)*cos(q1 + pi/4) - self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5) - self.L5*dq5*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq5*sin(q5)*cos(q1 + pi/4)*cos(q3) + self.L6*dq1*sin(q1 + pi/4)*sin(q3)*sin(q5)*sin(q6)*cos(q4) - self.L6*dq1*sin(q1 + pi/4)*sin(q6)*cos(q3)*cos(q5) + self.L6*dq1*sin(q2)*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q5) + self.L6*dq1*sin(q2)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4) + self.L6*dq1*sin(q4)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q2) - self.L6*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5)*sin(q6) + self.L6*dq2*sin(q1 + pi/4)*sin(q3)*sin(q6)*cos(q2)*cos(q5) + self.L6*dq2*sin(q1 + pi/4)*sin(q5)*sin(q6)*cos(q2)*cos(q3)*cos(q4) - self.L6*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*sin(q6)*cos(q4) + self.L6*dq3*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q3)*cos(q5) - self.L6*dq3*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q5) - self.L6*dq3*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4) - self.L6*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q5)*sin(q6)*cos(q3) + self.L6*dq4*sin(q1 + pi/4)*sin(q5)*sin(q6)*cos(q2)*cos(q4) + 
self.L6*dq4*sin(q3)*sin(q4)*sin(q5)*sin(q6)*cos(q1 + pi/4) - self.L6*dq5*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*sin(q6) + self.L6*dq5*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q3)*cos(q4)*cos(q5) + self.L6*dq5*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q2)*cos(q5) - self.L6*dq5*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L6*dq5*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q3) + self.L6*dq6*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q5)*cos(q6) + self.L6*dq6*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3)*cos(q4)*cos(q6) + self.L6*dq6*sin(q1 + pi/4)*sin(q4)*sin(q5)*cos(q2)*cos(q6) - self.L6*dq6*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4)*cos(q6) + self.L6*dq6*cos(q1 + pi/4)*cos(q3)*cos(q5)*cos(q6)),
0.707106781186548*sqrt(2)*self.L6*(dq1*sin(q1 + pi/4)*sin(q3)*sin(q4)*sin(q6) - dq1*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5)*cos(q6) - dq1*sin(q1 + pi/4)*sin(q5)*cos(q3)*cos(q6) + dq1*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q6) + dq1*sin(q2)*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q3) - dq1*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5)*cos(q6) - dq1*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5)*cos(q6) - dq1*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q4) + dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q5)*cos(q6) + dq2*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q4) + dq2*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q2)*cos(q6) + dq2*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q2)*cos(q3) - dq2*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5)*cos(q6) - dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4)*sin(q6) + dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4)*cos(q5)*cos(q6) + dq3*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3)*cos(q6) - dq3*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q6) - dq3*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q3) + dq3*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5)*cos(q6) + dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3)*cos(q5)*cos(q6) + dq4*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q3)*cos(q4) + dq4*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q2) - dq4*sin(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5)*cos(q6) - dq4*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5)*cos(q6) - dq4*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q4) + dq5*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q5)*cos(q6) + dq5*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3)*cos(q4)*cos(q6) + dq5*sin(q1 + pi/4)*sin(q4)*sin(q5)*cos(q2)*cos(q6) - dq5*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4)*cos(q6) + dq5*cos(q1 + pi/4)*cos(q3)*cos(q5)*cos(q6) - dq6*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*sin(q6) + dq6*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3)*cos(q6) + dq6*sin(q1 + pi/4)*sin(q2)*sin(q6)*cos(q3)*cos(q4)*cos(q5) + dq6*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q2)*cos(q5) - dq6*sin(q1 + pi/4)*cos(q2)*cos(q4)*cos(q6) - dq6*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q6) - dq6*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q4)*cos(q5) - dq6*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q3)),
0
],
[
0.707106781186548*sqrt(2)*(self.L1*dq1*cos(q1 + pi/4) + self.L2*dq1*cos(q1 + pi/4)*cos(q2) - self.L2*dq2*sin(q1 + pi/4)*sin(q2) - self.L3*dq1*sin(q1 + pi/4)*sin(q3) - self.L3*dq1*sin(q2)*cos(q1 + pi/4)*cos(q3) - self.L3*dq2*sin(q1 + pi/4)*cos(q2)*cos(q3) + self.L3*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3) + self.L3*dq3*cos(q1 + pi/4)*cos(q3) - self.L4*dq1*sin(q1 + pi/4)*sin(q3)*sin(q4) - self.L4*dq1*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq1*cos(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq2*sin(q1 + pi/4)*sin(q2)*cos(q4) - self.L4*dq2*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3) + self.L4*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4) + self.L4*dq3*sin(q4)*cos(q1 + pi/4)*cos(q3) - self.L4*dq4*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4) - self.L4*dq4*sin(q1 + pi/4)*sin(q4)*cos(q2) + self.L4*dq4*sin(q3)*cos(q1 + pi/4)*cos(q4) - self.L5*dq1*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5) - self.L5*dq1*sin(q1 + pi/4)*sin(q5)*cos(q3) + self.L5*dq1*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4) - self.L5*dq1*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq1*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q5) + self.L5*dq2*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q2) - self.L5*dq2*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4)*cos(q5) + self.L5*dq3*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3) - self.L5*dq3*sin(q3)*sin(q5)*cos(q1 + pi/4) + self.L5*dq3*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3)*cos(q5) - self.L5*dq4*sin(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L5*dq4*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3)*cos(q4) + self.L5*dq5*sin(q1 + pi/4)*sin(q4)*sin(q5)*cos(q2) - self.L5*dq5*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4) + self.L5*dq5*cos(q1 + pi/4)*cos(q3)*cos(q5) - self.L6*dq1*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q6) - self.L6*dq1*sin(q1 + 
pi/4)*sin(q3)*sin(q6)*cos(q4)*cos(q5) - self.L6*dq1*sin(q1 + pi/4)*sin(q5)*sin(q6)*cos(q3) + self.L6*dq1*sin(q2)*sin(q3)*sin(q5)*sin(q6)*cos(q1 + pi/4) - self.L6*dq1*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q6) - self.L6*dq1*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L6*dq1*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q5) + self.L6*dq1*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q6) + self.L6*dq2*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q6)*cos(q5) - self.L6*dq2*sin(q1 + pi/4)*sin(q2)*cos(q4)*cos(q6) + self.L6*dq2*sin(q1 + pi/4)*sin(q3)*sin(q5)*sin(q6)*cos(q2) - self.L6*dq2*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3)*cos(q6) - self.L6*dq2*sin(q1 + pi/4)*sin(q6)*cos(q2)*cos(q3)*cos(q4)*cos(q5) + self.L6*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4)*cos(q6) + self.L6*dq3*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q6)*cos(q4)*cos(q5) + self.L6*dq3*sin(q1 + pi/4)*sin(q2)*sin(q5)*sin(q6)*cos(q3) - self.L6*dq3*sin(q3)*sin(q5)*sin(q6)*cos(q1 + pi/4) + self.L6*dq3*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q6) + self.L6*dq3*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) + self.L6*dq4*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q6)*cos(q3)*cos(q5) - self.L6*dq4*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q6) - self.L6*dq4*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q6) - self.L6*dq4*sin(q1 + pi/4)*sin(q6)*cos(q2)*cos(q4)*cos(q5) - self.L6*dq4*sin(q3)*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q5) + self.L6*dq4*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q6) + self.L6*dq5*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q6)*cos(q5) + self.L6*dq5*sin(q1 + pi/4)*sin(q2)*sin(q5)*sin(q6)*cos(q3)*cos(q4) + self.L6*dq5*sin(q1 + pi/4)*sin(q4)*sin(q5)*sin(q6)*cos(q2) - self.L6*dq5*sin(q3)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q4) + self.L6*dq5*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q5) + self.L6*dq6*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*cos(q6) + self.L6*dq6*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q6)*cos(q3) - self.L6*dq6*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5)*cos(q6) - self.L6*dq6*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5)*cos(q6) - self.L6*dq6*sin(q1 + 
pi/4)*sin(q6)*cos(q2)*cos(q4) - self.L6*dq6*sin(q3)*sin(q4)*sin(q6)*cos(q1 + pi/4) + self.L6*dq6*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5)*cos(q6) + self.L6*dq6*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q6)),
0.707106781186548*sqrt(2)*(-self.L2*dq1*sin(q1 + pi/4)*sin(q2) + self.L2*dq2*cos(q1 + pi/4)*cos(q2) - self.L3*dq1*sin(q1 + pi/4)*cos(q2)*cos(q3) - self.L3*dq2*sin(q2)*cos(q1 + pi/4)*cos(q3) - self.L3*dq3*sin(q3)*cos(q1 + pi/4)*cos(q2) - self.L4*dq1*sin(q1 + pi/4)*sin(q2)*cos(q4) - self.L4*dq1*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3) - self.L4*dq2*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq2*cos(q1 + pi/4)*cos(q2)*cos(q4) - self.L4*dq3*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q2) - self.L4*dq4*sin(q2)*sin(q4)*cos(q1 + pi/4) + self.L4*dq4*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q5) + self.L5*dq1*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q2) - self.L5*dq1*sin(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5) + self.L5*dq2*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4) - self.L5*dq2*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq2*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5) - self.L5*dq3*sin(q3)*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L5*dq3*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L5*dq4*sin(q2)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq4*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q5) + self.L5*dq5*sin(q2)*sin(q4)*sin(q5)*cos(q1 + pi/4) - self.L5*dq5*sin(q3)*cos(q1 + pi/4)*cos(q2)*cos(q5) - self.L5*dq5*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + self.L6*dq1*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q6)*cos(q5) - self.L6*dq1*sin(q1 + pi/4)*sin(q2)*cos(q4)*cos(q6) + self.L6*dq1*sin(q1 + pi/4)*sin(q3)*sin(q5)*sin(q6)*cos(q2) - self.L6*dq1*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q3)*cos(q6) - self.L6*dq1*sin(q1 + pi/4)*sin(q6)*cos(q2)*cos(q3)*cos(q4)*cos(q5) + self.L6*dq2*sin(q2)*sin(q3)*sin(q5)*sin(q6)*cos(q1 + pi/4) - self.L6*dq2*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q6) - self.L6*dq2*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L6*dq2*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q5) + self.L6*dq2*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q6) - self.L6*dq3*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q6) - 
self.L6*dq3*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L6*dq3*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L6*dq4*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q6) - self.L6*dq4*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L6*dq4*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q5) + self.L6*dq4*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q6) + self.L6*dq5*sin(q2)*sin(q4)*sin(q5)*sin(q6)*cos(q1 + pi/4) - self.L6*dq5*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q5) - self.L6*dq5*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) - self.L6*dq6*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q5)*cos(q6) - self.L6*dq6*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q4) - self.L6*dq6*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q6) - self.L6*dq6*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q3) + self.L6*dq6*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5)*cos(q6)), 0.707106781186548*sqrt(2)*(self.L3*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3) + self.L3*dq1*cos(q1 + pi/4)*cos(q3) - self.L3*dq2*sin(q3)*cos(q1 + pi/4)*cos(q2) - self.L3*dq3*sin(q1 + pi/4)*sin(q3) - self.L3*dq3*sin(q2)*cos(q1 + pi/4)*cos(q3) + self.L4*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4) + self.L4*dq1*sin(q4)*cos(q1 + pi/4)*cos(q3) - self.L4*dq2*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q2) - self.L4*dq3*sin(q1 + pi/4)*sin(q3)*sin(q4) - self.L4*dq3*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq4*sin(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq4*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q4)*cos(q5) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3) - self.L5*dq1*sin(q3)*sin(q5)*cos(q1 + pi/4) + self.L5*dq1*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq2*sin(q3)*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L5*dq2*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L5*dq3*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5) - self.L5*dq3*sin(q1 + pi/4)*sin(q5)*cos(q3) + self.L5*dq3*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4) - self.L5*dq3*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - 
self.L5*dq4*sin(q1 + pi/4)*sin(q4)*cos(q3)*cos(q5) + self.L5*dq4*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q3)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q5)*cos(q3)*cos(q4) + self.L5*dq5*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4) - self.L5*dq5*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q5) + self.L6*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q4)*cos(q6) + self.L6*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q6)*cos(q4)*cos(q5) + self.L6*dq1*sin(q1 + pi/4)*sin(q2)*sin(q5)*sin(q6)*cos(q3) - self.L6*dq1*sin(q3)*sin(q5)*sin(q6)*cos(q1 + pi/4) + self.L6*dq1*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q6) + self.L6*dq1*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L6*dq2*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q6) - self.L6*dq2*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L6*dq2*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q3) - self.L6*dq3*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q6) - self.L6*dq3*sin(q1 + pi/4)*sin(q3)*sin(q6)*cos(q4)*cos(q5) - self.L6*dq3*sin(q1 + pi/4)*sin(q5)*sin(q6)*cos(q3) + self.L6*dq3*sin(q2)*sin(q3)*sin(q5)*sin(q6)*cos(q1 + pi/4) - self.L6*dq3*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q6) - self.L6*dq3*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L6*dq4*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q3)*cos(q5) + self.L6*dq4*sin(q1 + pi/4)*cos(q3)*cos(q4)*cos(q6) + self.L6*dq4*sin(q2)*sin(q3)*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q5) - self.L6*dq4*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q6) - self.L6*dq5*sin(q1 + pi/4)*sin(q3)*sin(q6)*cos(q5) - self.L6*dq5*sin(q1 + pi/4)*sin(q5)*sin(q6)*cos(q3)*cos(q4) + self.L6*dq5*sin(q2)*sin(q3)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q4) - self.L6*dq5*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q5) - self.L6*dq6*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q6) - self.L6*dq6*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q3) + self.L6*dq6*sin(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5)*cos(q6) + self.L6*dq6*sin(q2)*sin(q3)*sin(q4)*sin(q6)*cos(q1 + pi/4) - self.L6*dq6*sin(q2)*sin(q3)*cos(q1 + 
pi/4)*cos(q4)*cos(q5)*cos(q6) - self.L6*dq6*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q6)),
0.707106781186548*sqrt(2)*(-self.L4*dq1*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4) - self.L4*dq1*sin(q1 + pi/4)*sin(q4)*cos(q2) + self.L4*dq1*sin(q3)*cos(q1 + pi/4)*cos(q4) - self.L4*dq2*sin(q2)*sin(q4)*cos(q1 + pi/4) + self.L4*dq2*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) + self.L4*dq3*sin(q1 + pi/4)*cos(q3)*cos(q4) - self.L4*dq3*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4) - self.L4*dq4*sin(q1 + pi/4)*sin(q3)*sin(q4) - self.L4*dq4*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3) + self.L4*dq4*cos(q1 + pi/4)*cos(q2)*cos(q4) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q4)*cos(q3)*cos(q5) - self.L5*dq1*sin(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5) - self.L5*dq1*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5) - self.L5*dq2*sin(q2)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L5*dq2*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q5) - self.L5*dq3*sin(q1 + pi/4)*sin(q4)*cos(q3)*cos(q5) + self.L5*dq3*sin(q2)*sin(q3)*sin(q4)*cos(q1 + pi/4)*cos(q5) - self.L5*dq4*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5) - self.L5*dq4*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq4*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5) + self.L5*dq5*sin(q1 + pi/4)*sin(q3)*sin(q4)*sin(q5) + self.L5*dq5*sin(q2)*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q3) - self.L5*dq5*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q4) + self.L6*dq1*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q6)*cos(q3)*cos(q5) - self.L6*dq1*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q6) - self.L6*dq1*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q6) - self.L6*dq1*sin(q1 + pi/4)*sin(q6)*cos(q2)*cos(q4)*cos(q5) - self.L6*dq1*sin(q3)*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q5) + self.L6*dq1*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q6) - self.L6*dq2*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q6) - self.L6*dq2*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q4)*cos(q5) - self.L6*dq2*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q5) + self.L6*dq2*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q6) - self.L6*dq3*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q3)*cos(q5) + self.L6*dq3*sin(q1 + pi/4)*cos(q3)*cos(q4)*cos(q6) + 
self.L6*dq3*sin(q2)*sin(q3)*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q5) - self.L6*dq3*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q6) - self.L6*dq4*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q6) - self.L6*dq4*sin(q1 + pi/4)*sin(q3)*sin(q6)*cos(q4)*cos(q5) - self.L6*dq4*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q6) - self.L6*dq4*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L6*dq4*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q5) + self.L6*dq4*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q6) + self.L6*dq5*sin(q1 + pi/4)*sin(q3)*sin(q4)*sin(q5)*sin(q6) + self.L6*dq5*sin(q2)*sin(q4)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q3) - self.L6*dq5*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q4) - self.L6*dq6*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q5)*cos(q6) - self.L6*dq6*sin(q1 + pi/4)*sin(q3)*sin(q6)*cos(q4) - self.L6*dq6*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5)*cos(q6) - self.L6*dq6*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4) - self.L6*dq6*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q2) + self.L6*dq6*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5)*cos(q6)),
0.707106781186548*sqrt(2)*(self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*cos(q5) + self.L5*dq1*sin(q1 + pi/4)*sin(q2)*sin(q5)*cos(q3)*cos(q4) + self.L5*dq1*sin(q1 + pi/4)*sin(q4)*sin(q5)*cos(q2) - self.L5*dq1*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4) + self.L5*dq1*cos(q1 + pi/4)*cos(q3)*cos(q5) + self.L5*dq2*sin(q2)*sin(q4)*sin(q5)*cos(q1 + pi/4) - self.L5*dq2*sin(q3)*cos(q1 + pi/4)*cos(q2)*cos(q5) - self.L5*dq2*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) - self.L5*dq3*sin(q1 + pi/4)*sin(q3)*cos(q5) - self.L5*dq3*sin(q1 + pi/4)*sin(q5)*cos(q3)*cos(q4) + self.L5*dq3*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q4) - self.L5*dq3*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q5) + self.L5*dq4*sin(q1 + pi/4)*sin(q3)*sin(q4)*sin(q5) + self.L5*dq4*sin(q2)*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q3) - self.L5*dq4*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q4) - self.L5*dq5*sin(q1 + pi/4)*sin(q3)*cos(q4)*cos(q5) - self.L5*dq5*sin(q1 + pi/4)*sin(q5)*cos(q3) + self.L5*dq5*sin(q2)*sin(q3)*sin(q5)*cos(q1 + pi/4) - self.L5*dq5*sin(q2)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L5*dq5*sin(q4)*cos(q1 + pi/4)*cos(q2)*cos(q5) + self.L6*dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q6)*cos(q5) + self.L6*dq1*sin(q1 + pi/4)*sin(q2)*sin(q5)*sin(q6)*cos(q3)*cos(q4) + self.L6*dq1*sin(q1 + pi/4)*sin(q4)*sin(q5)*sin(q6)*cos(q2) - self.L6*dq1*sin(q3)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q4) + self.L6*dq1*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q5) + self.L6*dq2*sin(q2)*sin(q4)*sin(q5)*sin(q6)*cos(q1 + pi/4) - self.L6*dq2*sin(q3)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q5) - self.L6*dq2*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4) - self.L6*dq3*sin(q1 + pi/4)*sin(q3)*sin(q6)*cos(q5) - self.L6*dq3*sin(q1 + pi/4)*sin(q5)*sin(q6)*cos(q3)*cos(q4) + self.L6*dq3*sin(q2)*sin(q3)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q4) - self.L6*dq3*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q5) + self.L6*dq4*sin(q1 + pi/4)*sin(q3)*sin(q4)*sin(q5)*sin(q6) + self.L6*dq4*sin(q2)*sin(q4)*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q3) - 
self.L6*dq4*sin(q5)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q4) - self.L6*dq5*sin(q1 + pi/4)*sin(q3)*sin(q6)*cos(q4)*cos(q5) - self.L6*dq5*sin(q1 + pi/4)*sin(q5)*sin(q6)*cos(q3) + self.L6*dq5*sin(q2)*sin(q3)*sin(q5)*sin(q6)*cos(q1 + pi/4) - self.L6*dq5*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - self.L6*dq5*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q5) - self.L6*dq6*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q4)*cos(q6) + self.L6*dq6*sin(q1 + pi/4)*cos(q3)*cos(q5)*cos(q6) - self.L6*dq6*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q5)*cos(q6) - self.L6*dq6*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q6) - self.L6*dq6*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q6)),
0.707106781186548*sqrt(2)*self.L6*(dq1*sin(q1 + pi/4)*sin(q2)*sin(q3)*sin(q5)*cos(q6) + dq1*sin(q1 + pi/4)*sin(q2)*sin(q4)*sin(q6)*cos(q3) - dq1*sin(q1 + pi/4)*sin(q2)*cos(q3)*cos(q4)*cos(q5)*cos(q6) - dq1*sin(q1 + pi/4)*sin(q4)*cos(q2)*cos(q5)*cos(q6) - dq1*sin(q1 + pi/4)*sin(q6)*cos(q2)*cos(q4) - dq1*sin(q3)*sin(q4)*sin(q6)*cos(q1 + pi/4) + dq1*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5)*cos(q6) + dq1*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q6) - dq2*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q5)*cos(q6) - dq2*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q4) - dq2*sin(q3)*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q6) - dq2*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q3) + dq2*cos(q1 + pi/4)*cos(q2)*cos(q3)*cos(q4)*cos(q5)*cos(q6) - dq3*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q6) - dq3*sin(q1 + pi/4)*sin(q4)*sin(q6)*cos(q3) + dq3*sin(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5)*cos(q6) + dq3*sin(q2)*sin(q3)*sin(q4)*sin(q6)*cos(q1 + pi/4) - dq3*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q4)*cos(q5)*cos(q6) - dq3*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q6) - dq4*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q5)*cos(q6) - dq4*sin(q1 + pi/4)*sin(q3)*sin(q6)*cos(q4) - dq4*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q5)*cos(q6) - dq4*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4) - dq4*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q2) + dq4*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q5)*cos(q6) - dq5*sin(q1 + pi/4)*sin(q3)*sin(q5)*cos(q4)*cos(q6) + dq5*sin(q1 + pi/4)*cos(q3)*cos(q5)*cos(q6) - dq5*sin(q2)*sin(q3)*cos(q1 + pi/4)*cos(q5)*cos(q6) - dq5*sin(q2)*sin(q5)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q6) - dq5*sin(q4)*sin(q5)*cos(q1 + pi/4)*cos(q2)*cos(q6) - dq6*sin(q1 + pi/4)*sin(q3)*sin(q4)*cos(q6) - dq6*sin(q1 + pi/4)*sin(q3)*sin(q6)*cos(q4)*cos(q5) - dq6*sin(q1 + pi/4)*sin(q5)*sin(q6)*cos(q3) + dq6*sin(q2)*sin(q3)*sin(q5)*sin(q6)*cos(q1 + pi/4) - dq6*sin(q2)*sin(q4)*cos(q1 + pi/4)*cos(q3)*cos(q6) - dq6*sin(q2)*sin(q6)*cos(q1 + pi/4)*cos(q3)*cos(q4)*cos(q5) - dq6*sin(q4)*sin(q6)*cos(q1 + pi/4)*cos(q2)*cos(q5) + dq6*cos(q1 + pi/4)*cos(q2)*cos(q4)*cos(q6)),
0
],
[
0,
self.L2*dq2*sin(q2) + self.L3*dq2*cos(q2)*cos(q3) - self.L3*dq3*sin(q2)*sin(q3) + self.L4*(dq2*sin(q2)*cos(q4) + dq2*sin(q4)*cos(q2)*cos(q3) - dq3*sin(q2)*sin(q3)*sin(q4) + dq4*sin(q2)*cos(q3)*cos(q4) + dq4*sin(q4)*cos(q2)) - self.L5*((sin(q2)*cos(q3)*cos(q4) + sin(q4)*cos(q2))*dq5*sin(q5) + (dq2*sin(q2)*sin(q4) - dq2*cos(q2)*cos(q3)*cos(q4) + dq3*sin(q2)*sin(q3)*cos(q4) + dq4*sin(q2)*sin(q4)*cos(q3) - dq4*cos(q2)*cos(q4))*cos(q5) + dq2*sin(q3)*sin(q5)*cos(q2) + dq3*sin(q2)*sin(q5)*cos(q3) + dq5*sin(q2)*sin(q3)*cos(q5)) + self.L6*(((sin(q2)*cos(q3)*cos(q4) + sin(q4)*cos(q2))*cos(q5) - sin(q2)*sin(q3)*sin(q5))*dq6*cos(q6) - (sin(q2)*sin(q4)*cos(q3) - cos(q2)*cos(q4))*dq6*sin(q6) - ((sin(q2)*cos(q3)*cos(q4) + sin(q4)*cos(q2))*dq5*sin(q5) + (dq2*sin(q2)*sin(q4) - dq2*cos(q2)*cos(q3)*cos(q4) + dq3*sin(q2)*sin(q3)*cos(q4) + dq4*sin(q2)*sin(q4)*cos(q3) - dq4*cos(q2)*cos(q4))*cos(q5) + dq2*sin(q3)*sin(q5)*cos(q2) + dq3*sin(q2)*sin(q5)*cos(q3) + dq5*sin(q2)*sin(q3)*cos(q5))*sin(q6) + (dq2*sin(q2)*cos(q4) + dq2*sin(q4)*cos(q2)*cos(q3) - dq3*sin(q2)*sin(q3)*sin(q4) + dq4*sin(q2)*cos(q3)*cos(q4) + dq4*sin(q4)*cos(q2))*cos(q6)),
-self.L3*dq2*sin(q2)*sin(q3) + self.L3*dq3*cos(q2)*cos(q3) - self.L4*dq2*sin(q2)*sin(q3)*sin(q4) + self.L4*dq3*sin(q4)*cos(q2)*cos(q3) + self.L4*dq4*sin(q3)*cos(q2)*cos(q4) - self.L5*(dq2*sin(q2)*sin(q3)*cos(q4)*cos(q5) + dq2*sin(q2)*sin(q5)*cos(q3) + dq3*sin(q3)*sin(q5)*cos(q2) - dq3*cos(q2)*cos(q3)*cos(q4)*cos(q5) + dq4*sin(q3)*sin(q4)*cos(q2)*cos(q5) + dq5*sin(q3)*sin(q5)*cos(q2)*cos(q4) - dq5*cos(q2)*cos(q3)*cos(q5)) + self.L6*((sin(q3)*cos(q4)*cos(q5) + sin(q5)*cos(q3))*dq6*cos(q2)*cos(q6) - (dq2*sin(q2)*sin(q3)*cos(q4)*cos(q5) + dq2*sin(q2)*sin(q5)*cos(q3) + dq3*sin(q3)*sin(q5)*cos(q2) - dq3*cos(q2)*cos(q3)*cos(q4)*cos(q5) + dq4*sin(q3)*sin(q4)*cos(q2)*cos(q5) + dq5*sin(q3)*sin(q5)*cos(q2)*cos(q4) - dq5*cos(q2)*cos(q3)*cos(q5))*sin(q6) - dq2*sin(q2)*sin(q3)*sin(q4)*cos(q6) + dq3*sin(q4)*cos(q2)*cos(q3)*cos(q6) + dq4*sin(q3)*cos(q2)*cos(q4)*cos(q6) - dq6*sin(q3)*sin(q4)*sin(q6)*cos(q2)),
self.L4*(dq2*sin(q2)*cos(q3)*cos(q4) + dq2*sin(q4)*cos(q2) + dq3*sin(q3)*cos(q2)*cos(q4) + dq4*sin(q2)*cos(q4) + dq4*sin(q4)*cos(q2)*cos(q3)) - self.L5*(sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3))*dq5*sin(q5) - self.L5*(dq2*sin(q2)*sin(q4)*cos(q3) - dq2*cos(q2)*cos(q4) + dq3*sin(q3)*sin(q4)*cos(q2) + dq4*sin(q2)*sin(q4) - dq4*cos(q2)*cos(q3)*cos(q4))*cos(q5) - self.L6*((sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*dq6*sin(q6) + (sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3))*dq5*sin(q5)*sin(q6) - (sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3))*dq6*cos(q5)*cos(q6) + (dq2*sin(q2)*sin(q4)*cos(q3) - dq2*cos(q2)*cos(q4) + dq3*sin(q3)*sin(q4)*cos(q2) + dq4*sin(q2)*sin(q4) - dq4*cos(q2)*cos(q3)*cos(q4))*sin(q6)*cos(q5) - (dq2*sin(q2)*cos(q3)*cos(q4) + dq2*sin(q4)*cos(q2) + dq3*sin(q3)*cos(q2)*cos(q4) + dq4*sin(q2)*cos(q4) + dq4*sin(q4)*cos(q2)*cos(q3))*cos(q6)),
-self.L5*((sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*dq5*cos(q5) + (dq2*sin(q2)*cos(q3)*cos(q4) + dq2*sin(q4)*cos(q2) + dq3*sin(q3)*cos(q2)*cos(q4) + dq4*sin(q2)*cos(q4) + dq4*sin(q4)*cos(q2)*cos(q3))*sin(q5) + dq2*sin(q2)*sin(q3)*cos(q5) - dq3*cos(q2)*cos(q3)*cos(q5) + dq5*sin(q3)*sin(q5)*cos(q2)) - self.L6*((sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*sin(q5) - sin(q3)*cos(q2)*cos(q5))*dq6*cos(q6) - self.L6*((sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*dq5*cos(q5) + (dq2*sin(q2)*cos(q3)*cos(q4) + dq2*sin(q4)*cos(q2) + dq3*sin(q3)*cos(q2)*cos(q4) + dq4*sin(q2)*cos(q4) + dq4*sin(q4)*cos(q2)*cos(q3))*sin(q5) + dq2*sin(q2)*sin(q3)*cos(q5) - dq3*cos(q2)*cos(q3)*cos(q5) + dq5*sin(q3)*sin(q5)*cos(q2))*sin(q6),
-self.L6*(((sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*cos(q5) + sin(q3)*sin(q5)*cos(q2))*dq6*sin(q6) - (sin(q2)*cos(q4) + sin(q4)*cos(q2)*cos(q3))*dq6*cos(q6) - (-(sin(q2)*sin(q4) - cos(q2)*cos(q3)*cos(q4))*dq5*sin(q5) + (dq2*sin(q2)*cos(q3)*cos(q4) + dq2*sin(q4)*cos(q2) + dq3*sin(q3)*cos(q2)*cos(q4) + dq4*sin(q2)*cos(q4) + dq4*sin(q4)*cos(q2)*cos(q3))*cos(q5) - dq2*sin(q2)*sin(q3)*sin(q5) + dq3*sin(q5)*cos(q2)*cos(q3) + dq5*sin(q3)*cos(q2)*cos(q5))*cos(q6) + (dq2*sin(q2)*sin(q4)*cos(q3) - dq2*cos(q2)*cos(q4) + dq3*sin(q3)*sin(q4)*cos(q2) + dq4*sin(q2)*sin(q4) - dq4*cos(q2)*cos(q3)*cos(q4))*sin(q6)),
0
]
])
return z
def djacobi_all(self, q, dq):
    """Compute the time derivatives of every Jacobian matrix.

    Evaluates d/dt of the Jacobian for each frame — world origin (Wo),
    base link (BL), links 0 through 7 and the gripper link (GL) — at the
    given joint state, in that fixed order.

    :param q: joint positions, forwarded unchanged to each djacobi_* method
    :param dq: joint velocities, forwarded unchanged to each djacobi_* method
    :return: list of the eleven Jacobian time derivatives,
        ordered [Wo, BL, 0, 1, 2, 3, 4, 5, 6, 7, GL]
    """
    derivative_methods = (
        self.djacobi_Wo,
        self.djacobi_BL,
        self.djacobi_0,
        self.djacobi_1,
        self.djacobi_2,
        self.djacobi_3,
        self.djacobi_4,
        self.djacobi_5,
        self.djacobi_6,
        self.djacobi_7,
        self.djacobi_GL,
    )
    return [compute(q, dq) for compute in derivative_methods]
| 275.344398 | 6,124 | 0.585318 | 29,035 | 132,716 | 2.673119 | 0.003857 | 0.065761 | 0.082202 | 0.065761 | 0.988494 | 0.98825 | 0.987013 | 0.985879 | 0.98325 | 0.980145 | 0 | 0.201059 | 0.109459 | 132,716 | 481 | 6,125 | 275.91684 | 0.455592 | 0.001273 | 0 | 0.529691 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.061758 | false | 0 | 0.004751 | 0 | 0.154394 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
dff607370bc06e0893a835f6d801a8c1638260b3 | 41,867 | py | Python | openprocurement/auctions/core/tests/blanks/auction_blanks.py | EBRD-ProzorroSale/openprocurement.auctions.core | 52bd59f193f25e4997612fca0f87291decf06966 | [
"Apache-2.0"
] | 2 | 2016-09-15T20:17:43.000Z | 2017-01-08T03:32:43.000Z | openprocurement/auctions/core/tests/blanks/auction_blanks.py | EBRD-ProzorroSale/openprocurement.auctions.core | 52bd59f193f25e4997612fca0f87291decf06966 | [
"Apache-2.0"
] | 183 | 2017-12-21T11:04:37.000Z | 2019-03-27T08:14:34.000Z | openprocurement/auctions/core/tests/blanks/auction_blanks.py | EBRD-ProzorroSale/openprocurement.auctions.core | 52bd59f193f25e4997612fca0f87291decf06966 | [
"Apache-2.0"
] | 12 | 2016-09-05T12:07:48.000Z | 2019-02-26T09:24:17.000Z | # -*- coding: utf-8 -*-
from copy import deepcopy
from datetime import timedelta
from openprocurement.api.utils import get_now
def submission_method_details_no_auction(self):
    """With mode:no-auction the live bidding phase is skipped entirely.

    Posting auction results immediately yields a zero-length auctionPeriod
    and jumps the auction straight to active.qualification.
    """
    self.initial_data['submissionMethodDetails'] = u'quick(mode:no-auction)'
    self.create_auction()
    self.app.authorization = ('Basic', ('auction', ''))
    results_url = '/auctions/{}/auction'.format(self.auction_id)
    response = self.app.post_json(results_url,
                                  {'data': {'bids': self.initial_bids}})
    auction = response.json['data']
    period = auction['auctionPeriod']
    self.assertEqual(period['startDate'], period['endDate'])
    self.assertEqual(auction['status'], "active.qualification")
def submission_method_details_fast_forward(self):
    """With mode:fast-forward the auction phase collapses to an instant.

    After posting the bids the auctionPeriod has equal start and end dates
    and the auction reaches active.qualification directly.
    """
    self.initial_data['submissionMethodDetails'] = u'quick(mode:fast-forward)'
    self.create_auction()
    self.app.authorization = ('Basic', ('auction', ''))
    results_url = '/auctions/{}/auction'.format(self.auction_id)
    response = self.app.post_json(results_url,
                                  {'data': {'bids': self.initial_bids}})
    auction = response.json['data']
    period = auction['auctionPeriod']
    self.assertEqual(period['startDate'], period['endDate'])
    self.assertEqual(auction['status'], "active.qualification")
# AuctionAuctionResourceTest
def get_auction_auction_not_found(self):
    """GET/PATCH/POST on /auctions/some_id/auction all return the same 404.

    The three verbs share one expected error body, so the request/assert
    stanza is exercised in a loop over request thunks.
    """
    expected_errors = [
        {u'description': u'Not Found', u'location':
            u'url', u'name': u'auction_id'}
    ]
    attempts = (
        lambda: self.app.get('/auctions/some_id/auction', status=404),
        lambda: self.app.patch_json('/auctions/some_id/auction',
                                    {'data': {}}, status=404),
        lambda: self.app.post_json('/auctions/some_id/auction',
                                   {'data': {}}, status=404),
    )
    for attempt in attempts:
        response = attempt()
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], expected_errors)
def get_auction_auction(self):
    """GET of the auction view is allowed only while status is active.auction.

    Verifies: 403 in active.tendering; the stripped auction data returned
    during active.auction (no procuringEntity, no bid tenderers, bid amounts
    intact); the opt_jsonp / opt_pretty renderings; 403 again once the
    auction reaches active.qualification.
    """
    self.app.authorization = ('Basic', ('auction', ''))
    # Too early: the view is forbidden while bids are still being accepted.
    response = self.app.get('/auctions/{}/auction'.format(self.auction_id), status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't get auction info in current (active.tendering) auction status")
    self.set_status('active.auction')
    response = self.app.get('/auctions/{}/auction'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    auction = response.json['data']
    self.assertNotEqual(auction, self.initial_data)
    self.assertIn('dateModified', auction)
    self.assertIn('minimalStep', auction)
    # Confidential fields are stripped from the auction-module view.
    self.assertNotIn("procuringEntity", auction)
    self.assertNotIn("tenderers", auction["bids"][0])
    self.assertEqual(auction["bids"][0]['value']['amount'], self.initial_bids[0]['value']['amount'])
    self.assertEqual(auction["bids"][1]['value']['amount'], self.initial_bids[1]['value']['amount'])
    # self.assertEqual(self.initial_data["auctionPeriod"]['startDate'], auction["auctionPeriod"]['startDate'])
    # Same resource rendered as JSONP and as pretty-printed JSON.
    response = self.app.get('/auctions/{}/auction?opt_jsonp=callback'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/javascript')
    self.assertIn('callback({"data": {"', response.body)
    response = self.app.get('/auctions/{}/auction?opt_pretty=1'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertIn('{\n "data": {\n "', response.body)
    # Too late: the view closes again once qualification starts.
    self.set_status('active.qualification')
    response = self.app.get('/auctions/{}/auction'.format(self.auction_id), status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't get auction info in current (active.qualification) auction status")
def patch_auction_auction(self):
    """PATCH of auction/participation URLs: status gating and validation.

    Walks the endpoint through: 403 in active.tendering; 422 for a rogue
    bid field; 422 when the posted bid count does not match; 422 for a
    malformed / unknown bid id; a successful 200 that stores the URLs
    (matched by bid id, not by list position); and 403 once complete.
    """
    self.app.authorization = ('Basic', ('auction', ''))
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': {}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't update auction urls in current (active.tendering) auction status")
    self.set_status('active.auction')
    # Unknown fields in a bid are rejected by schema validation.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id),
                                   {'data': {'bids': [{'invalid_field': 'invalid_value'}]}}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'], [
        {u'description': {u'invalid_field': u'Rogue field'}, u'location': u'body', u'name': u'bids'}
    ])
    patch_data = {
        'auctionUrl': u'http://auction-sandbox.openprocurement.org/auctions/{}'.format(self.auction_id),
        'bids': [
            {
                "id": self.initial_bids[1]['id'],
                "participationUrl": u'http://auction-sandbox.openprocurement.org/auctions/{}?key_for_bid={}'.format(
                    self.auction_id, self.initial_bids[1]['id'])
            }
        ]
    }
    # Only one bid posted while the auction has more: count mismatch.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Number of auction results did not match the number of auction bids")
    patch_data['bids'].append({
        "participationUrl": u'http://auction-sandbox.openprocurement.org/auctions/{}?key_for_bid={}'.format(
            self.auction_id, self.initial_bids[0]['id'])
    })
    # Malformed id: not a valid 32-char hash.
    patch_data['bids'][1]['id'] = "some_id"
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], {u'id': [u'Hash value is wrong length.']})
    # Well-formed but unknown id: must match an existing bid.
    patch_data['bids'][1]['id'] = "00000000000000000000000000000000"
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], "Auction bids should be identical to the auction bids")
    patch_data['bids'][1]['id'] = self.initial_bids[0]['id']
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    auction = response.json['data']
    # URLs are applied by bid id, so the stored order is swapped relative
    # to the posted list.
    self.assertEqual(auction["bids"][0]['participationUrl'], patch_data["bids"][1]['participationUrl'])
    self.assertEqual(auction["bids"][1]['participationUrl'], patch_data["bids"][0]['participationUrl'])
    self.set_status('complete')
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't update auction urls in current (complete) auction status")
def post_auction_auction_document(self):
    """Document upload by the auction module is gated by auction status.

    403 in active.tendering; 201 in active.auction; the document can still
    be replaced (PUT) after auction results are posted, yielding a new
    storage key; 403 once the auction is complete.
    """
    self.app.authorization = ('Basic', ('auction', ''))
    response = self.app.post('/auctions/{}/documents'.format(self.auction_id),
                             upload_files=[('file', 'name.doc', 'content')], status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't add document in current (active.tendering) auction status")
    self.set_status('active.auction')
    response = self.app.post('/auctions/{}/documents'.format(self.auction_id),
                             upload_files=[('file', 'name.doc', 'content')])
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    doc_id = response.json["data"]['id']
    # Storage key is the last query-string value of the document URL.
    key = response.json["data"]["url"].split('?')[-1].split('=')[-1]
    # Post auction results so the auction moves on before the re-upload.
    patch_data = {
        'bids': [
            {
                "id": self.initial_bids[1]['id'],
                "value": {
                    "amount": 419,
                    "currency": "UAH",
                    "valueAddedTaxIncluded": True
                }
            },
            {
                'id': self.initial_bids[0]['id'],
                "value": {
                    "amount": 409,
                    "currency": "UAH",
                    "valueAddedTaxIncluded": True
                }
            }
        ]
    }
    response = self.app.post_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    # Replacing the document produces a fresh storage key.
    response = self.app.put('/auctions/{}/documents/{}'.format(self.auction_id, doc_id),
                            upload_files=[('file', 'name.doc', 'content_with_names')])
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(doc_id, response.json["data"]["id"])
    key2 = response.json["data"]["url"].split('?')[-1].split('=')[-1]
    self.assertNotEqual(key, key2)
    self.set_status('complete')
    response = self.app.post('/auctions/{}/documents'.format(self.auction_id),
                             upload_files=[('file', 'name.doc', 'content')], status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't add document in current (complete) auction status")
# AuctionSameValueAuctionResourceTest
def post_auction_auction_not_changed(self):
    """Posting the initial bids unchanged qualifies the first bidder.

    The auction moves to active.qualification and the first award mirrors
    the first initial bid (id, amount, suppliers).
    """
    self.app.authorization = ('Basic', ('auction', ''))
    results_url = '/auctions/{}/auction'.format(self.auction_id)
    response = self.app.post_json(results_url,
                                  {'data': {'bids': self.initial_bids}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    auction = response.json['data']
    self.assertEqual('active.qualification', auction["status"])
    first_bid = self.initial_bids[0]
    first_award = auction["awards"][0]
    self.assertEqual(first_award['bid_id'], first_bid['id'])
    self.assertEqual(first_award['value']['amount'], first_bid['value']['amount'])
    self.assertEqual(first_award['suppliers'], first_bid['tenderers'])
def post_auction_auction_reversed(self):
    """Bid dates break ties: the bid with the earliest date wins.

    Each result is stamped i seconds before now, so the last initial bid
    carries the earliest date and takes the first award.
    """
    self.app.authorization = ('Basic', ('auction', ''))
    now = get_now()
    results = []
    for offset, bid in enumerate(self.initial_bids):
        results.append({
            "id": bid['id'],
            "date": (now - timedelta(seconds=offset)).isoformat(),
            "value": bid['value']
        })
    patch_data = {'bids': results}
    response = self.app.post_json('/auctions/{}/auction'.format(self.auction_id),
                                  {'data': patch_data})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    auction = response.json['data']
    self.assertEqual('active.qualification', auction["status"])
    winner = self.initial_bids[2]
    first_award = auction["awards"][0]
    self.assertEqual(first_award['bid_id'], winner['id'])
    self.assertEqual(first_award['value']['amount'], winner['value']['amount'])
    self.assertEqual(first_award['suppliers'], winner['tenderers'])
# AuctionLotAuctionResourceTest
def get_auction_auction_lot(self):
    """Single-lot variant of the auction GET view status gating.

    403 in active.tendering; during active.auction the data carries 'lots'
    and per-lot bid amounts under lotValues, with procuringEntity and bid
    tenderers stripped; 403 again in active.qualification.
    """
    self.app.authorization = ('Basic', ('auction', ''))
    response = self.app.get('/auctions/{}/auction'.format(self.auction_id), status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't get auction info in current (active.tendering) auction status")
    self.set_status('active.auction')
    response = self.app.get('/auctions/{}/auction'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    auction = response.json['data']
    self.assertNotEqual(auction, self.initial_data)
    self.assertIn('dateModified', auction)
    self.assertIn('minimalStep', auction)
    self.assertIn('lots', auction)
    # Confidential fields are stripped from the auction-module view.
    self.assertNotIn("procuringEntity", auction)
    self.assertNotIn("tenderers", auction["bids"][0])
    self.assertEqual(auction["bids"][0]['lotValues'][0]['value']['amount'],
                     self.initial_bids[0]['lotValues'][0]['value']['amount'])
    self.assertEqual(auction["bids"][1]['lotValues'][0]['value']['amount'],
                     self.initial_bids[1]['lotValues'][0]['value']['amount'])
    self.set_status('active.qualification')
    response = self.app.get('/auctions/{}/auction'.format(self.auction_id), status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't get auction info in current (active.qualification) auction status")
def patch_auction_auction_lot(self):
    """Single-lot variant of the auction-URL PATCH validation walk.

    Adds the lot-specific rules on top of the plain variant: participation
    URLs must be posted per lot inside lotValues, and the auctionUrl must
    be posted per lot under 'lots'. Then the usual bid-id validation, the
    successful per-lot PATCH, and 403 once complete.
    """
    self.app.authorization = ('Basic', ('auction', ''))
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': {}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't update auction urls in current (active.tendering) auction status")
    self.set_status('active.auction')
    # Chronograph tick; presumably schedules the lot auction periods —
    # TODO(review): confirm against the chronograph view.
    self.app.authorization = ('Basic', ('chronograph', ''))
    response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
    self.assertEqual(response.status, '200 OK')
    self.app.authorization = ('Basic', ('auction', ''))
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id),
                                   {'data': {'bids': [{'invalid_field': 'invalid_value'}]}}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'], [
        {u'description': {u'invalid_field': u'Rogue field'}, u'location': u'body', u'name': u'bids'}
    ])
    patch_data = {
        'auctionUrl': u'http://auction-sandbox.openprocurement.org/auctions/{}'.format(self.auction_id),
        'bids': [
            {
                "id": self.initial_bids[1]['id'],
                "participationUrl": u'http://auction-sandbox.openprocurement.org/auctions/{}?key_for_bid={}'.format(
                    self.auction_id, self.initial_bids[1]['id'])
            }
        ]
    }
    # A top-level participationUrl is rejected for a lot auction.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'], [
        {u'description': [{u'participationUrl': [u'url should be posted for each lot of bid']}], u'location': u'body',
         u'name': u'bids'}
    ])
    del patch_data['bids'][0]["participationUrl"]
    patch_data['bids'][0]['lotValues'] = [
        {
            "participationUrl": u'http://auction-sandbox.openprocurement.org/auctions/{}?key_for_bid={}'.format(
                self.auction_id, self.initial_bids[0]['id'])
        }
    ]
    # Likewise a top-level auctionUrl must move under 'lots'.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'], [
        {u'description': ["url should be posted for each lot"], u'location': u'body', u'name': u'auctionUrl'}
    ])
    patch_data['lots'] = [
        {
            "auctionUrl": patch_data.pop('auctionUrl')
        }
    ]
    # Only one bid posted while the auction has more: count mismatch.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Number of auction results did not match the number of auction bids")
    patch_data['bids'].append({
        'lotValues': [
            {
                "participationUrl": u'http://auction-sandbox.openprocurement.org/auctions/{}?key_for_bid={}'.format(
                    self.auction_id, self.initial_bids[0]['id'])
            }
        ]
    })
    # Malformed id: not a valid 32-char hash.
    patch_data['bids'][1]['id'] = "some_id"
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], {u'id': [u'Hash value is wrong length.']})
    # Well-formed but unknown id: must match an existing bid.
    patch_data['bids'][1]['id'] = "00000000000000000000000000000000"
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], "Auction bids should be identical to the auction bids")
    patch_data['bids'][1]['id'] = self.initial_bids[0]['id']
    # The lot-less endpoint accepts the data but returns an empty body;
    # the per-lot endpoint below performs the actual update.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertIsNone(response.json)
    for lot in self.initial_lots:
        response = self.app.patch_json('/auctions/{}/auction/{}'.format(self.auction_id, lot['id']),
                                       {'data': patch_data})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        auction = response.json['data']
    # URLs are applied by bid id, so the stored order is swapped relative
    # to the posted list.
    self.assertEqual(auction["bids"][0]['lotValues'][0]['participationUrl'],
                     patch_data["bids"][1]['lotValues'][0]['participationUrl'])
    self.assertEqual(auction["bids"][1]['lotValues'][0]['participationUrl'],
                     patch_data["bids"][0]['lotValues'][0]['participationUrl'])
    self.assertEqual(auction["lots"][0]['auctionUrl'], patch_data["lots"][0]['auctionUrl'])
    self.set_status('complete')
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't update auction urls in current (complete) auction status")
def post_auction_auction_document_lot(self):
    """Lot variant of the document-upload status gating.

    Same walk as the plain variant, plus re-pointing the uploaded document
    at a lot (documentOf=lot) before posting the per-lot auction results.
    """
    self.app.authorization = ('Basic', ('auction', ''))
    response = self.app.post('/auctions/{}/documents'.format(self.auction_id),
                             upload_files=[('file', 'name.doc', 'content')], status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't add document in current (active.tendering) auction status")
    self.set_status('active.auction')
    response = self.app.post('/auctions/{}/documents'.format(self.auction_id),
                             upload_files=[('file', 'name.doc', 'content')])
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    doc_id = response.json["data"]['id']
    # Storage key is the last query-string value of the document URL.
    key = response.json["data"]["url"].split('?')[-1].split('=')[-1]
    # Attach the document to the first lot.
    response = self.app.patch_json('/auctions/{}/documents/{}'.format(self.auction_id, doc_id),
                                   {'data': {"documentOf": "lot", 'relatedItem': self.initial_lots[0]['id']}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json["data"]["documentOf"], "lot")
    self.assertEqual(response.json["data"]["relatedItem"], self.initial_lots[0]['id'])
    # Post per-lot auction results so the auction moves on.
    patch_data = {
        'bids': [
            {
                "id": self.initial_bids[1]['id'],
                'lotValues': [
                    {
                        "value": {
                            "amount": 409,
                            "currency": "UAH",
                            "valueAddedTaxIncluded": True
                        }
                    }
                ]
            },
            {
                'id': self.initial_bids[0]['id'],
                'lotValues': [
                    {
                        "value": {
                            "amount": 419,
                            "currency": "UAH",
                            "valueAddedTaxIncluded": True
                        }
                    }
                ]
            }
        ]
    }
    response = self.app.post_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    # Replacing the document produces a fresh storage key.
    response = self.app.put('/auctions/{}/documents/{}'.format(self.auction_id, doc_id),
                            upload_files=[('file', 'name.doc', 'content_with_names')])
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(doc_id, response.json["data"]["id"])
    key2 = response.json["data"]["url"].split('?')[-1].split('=')[-1]
    self.assertNotEqual(key, key2)
    self.set_status('complete')
    response = self.app.post('/auctions/{}/documents'.format(self.auction_id),
                             upload_files=[('file', 'name.doc', 'content')], status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't add document in current (complete) auction status")
# AuctionMultipleLotAuctionResourceTest
def get_auction_auction_2_lots(self):
    """Two-lot variant of the auction GET view status gating.

    Like the single-lot variant but also checks the second lotValues entry
    of each bid.

    NOTE(review): unlike the sibling tests, this one never sets
    self.app.authorization = ('Basic', ('auction', '')) — presumably it
    relies on authorization left over from setup; confirm against the
    test-case base class.
    """
    response = self.app.get('/auctions/{}/auction'.format(self.auction_id), status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't get auction info in current (active.tendering) auction status")
    self.set_status('active.auction')
    response = self.app.get('/auctions/{}/auction'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    auction = response.json['data']
    self.assertNotEqual(auction, self.initial_data)
    self.assertIn('dateModified', auction)
    self.assertIn('minimalStep', auction)
    self.assertIn('lots', auction)
    # Confidential fields are stripped from the auction-module view.
    self.assertNotIn("procuringEntity", auction)
    self.assertNotIn("tenderers", auction["bids"][0])
    # Per-lot amounts survive for both lots of both bids.
    self.assertEqual(auction["bids"][0]['lotValues'][0]['value']['amount'],
                     self.initial_bids[0]['lotValues'][0]['value']['amount'])
    self.assertEqual(auction["bids"][1]['lotValues'][0]['value']['amount'],
                     self.initial_bids[1]['lotValues'][0]['value']['amount'])
    self.assertEqual(auction["bids"][0]['lotValues'][1]['value']['amount'],
                     self.initial_bids[0]['lotValues'][1]['value']['amount'])
    self.assertEqual(auction["bids"][1]['lotValues'][1]['value']['amount'],
                     self.initial_bids[1]['lotValues'][1]['value']['amount'])
    self.set_status('active.qualification')
    response = self.app.get('/auctions/{}/auction'.format(self.auction_id), status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't get auction info in current (active.qualification) auction status")
def patch_auction_auction_2_lots(self):
    """PATCH /auctions/{id}/auction validation walk for a two-lot auction.

    Exercises, in order: wrong-status rejection, rogue fields, all
    cross-validation errors between bids / lots / lotValues, a successful
    global and per-lot URL update, and rejection after lot cancellation.
    """
    self.app.authorization = ('Basic', ('auction', ''))
    # URLs may only be updated once the auction is in active.auction.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': {}}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't update auction urls in current (active.tendering) auction status")
    self.set_status('active.auction')
    # chronograph "touches" the auction so the new status takes effect.
    self.app.authorization = ('Basic', ('chronograph', ''))
    response = self.app.patch_json('/auctions/{}'.format(self.auction_id), {'data': {'id': self.auction_id}})
    self.assertEqual(response.status, '200 OK')
    self.app.authorization = ('Basic', ('auction', ''))
    # Unknown bid fields are rejected as rogue.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id),
                                   {'data': {'bids': [{'invalid_field': 'invalid_value'}]}}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'], [
        {u'description': {u'invalid_field': u'Rogue field'}, u'location': u'body', u'name': u'bids'}
    ])
    patch_data = {
        'auctionUrl': u'http://auction-sandbox.openprocurement.org/auctions/{}'.format(self.auction_id),
        'bids': [
            {
                "id": self.initial_bids[1]['id'],
                "participationUrl": u'http://auction-sandbox.openprocurement.org/auctions/{}?key_for_bid={}'.format(
                    self.auction_id, self.initial_bids[1]['id'])
            }
        ]
    }
    # In a multilot auction participationUrl belongs inside lotValues.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'], [
        {u'description': [{u'participationUrl': [u'url should be posted for each lot of bid']}], u'location': u'body',
         u'name': u'bids'}
    ])
    del patch_data['bids'][0]["participationUrl"]
    patch_data['bids'][0]['lotValues'] = [
        {
            "participationUrl": u'http://auction-sandbox.openprocurement.org/auctions/{}?key_for_bid={}'.format(
                self.auction_id, self.initial_bids[0]['id'])
        }
    ]
    # Likewise auctionUrl belongs on each lot, not at the top level.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'], [
        {u'description': ["url should be posted for each lot"], u'location': u'body', u'name': u'auctionUrl'}
    ])
    patch_data['lots'] = [
        {
            "auctionUrl": patch_data.pop('auctionUrl')
        }
    ]
    # Bid count must equal the number of registered bids.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Number of auction results did not match the number of auction bids")
    patch_data['bids'].append({
        'lotValues': [
            {
                "participationUrl": u'http://auction-sandbox.openprocurement.org/auctions/{}?key_for_bid={}'.format(
                    self.auction_id, self.initial_bids[0]['id'])
            }
        ]
    })
    # Malformed bid id (wrong hash length).
    patch_data['bids'][1]['id'] = "some_id"
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], {u'id': [u'Hash value is wrong length.']})
    # Well-formed but unknown bid id.
    patch_data['bids'][1]['id'] = "00000000000000000000000000000000"
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], "Auction bids should be identical to the auction bids")
    patch_data['bids'][1]['id'] = self.initial_bids[0]['id']
    # Lot count must equal the number of auction lots.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     u'Number of lots did not match the number of auction lots')
    patch_data['lots'] = [patch_data['lots'][0].copy() for _ in self.initial_lots]
    # Unknown lot id.
    patch_data['lots'][1]['id'] = "00000000000000000000000000000000"
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], u'Auction lots should be identical to the auction lots')
    patch_data['lots'][1]['id'] = self.initial_lots[1]['id']
    # Every bid must carry one lotValue per lot.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     [{"lotValues": ["Number of lots of auction results did not match the number of auction lots"]}])
    for bid in patch_data['bids']:
        bid['lotValues'] = [bid['lotValues'][0].copy() for i in self.initial_lots]
    # relatedLot of each lotValue must match the corresponding bid lotValue.
    patch_data['bids'][0]['lotValues'][1]['relatedLot'] = self.initial_bids[0]['lotValues'][0]['relatedLot']
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data}, status=422)
    self.assertEqual(response.status, '422 Unprocessable Entity')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     [{u'lotValues': [{u'relatedLot': [u'relatedLot should be one of lots of bid']}]}])
    patch_data['bids'][0]['lotValues'][1]['relatedLot'] = self.initial_bids[0]['lotValues'][1]['relatedLot']
    # Fully valid global patch: accepted, returns no body.
    response = self.app.patch_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertIsNone(response.json)
    # Per-lot patches return the updated auction data.
    for lot in self.initial_lots:
        response = self.app.patch_json('/auctions/{}/auction/{}'.format(self.auction_id, lot['id']),
                                       {'data': patch_data})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        auction = response.json['data']
    # Bids are stored reordered relative to patch_data (matched by id).
    self.assertEqual(auction["bids"][0]['lotValues'][0]['participationUrl'],
                     patch_data["bids"][1]['lotValues'][0]['participationUrl'])
    self.assertEqual(auction["bids"][1]['lotValues'][0]['participationUrl'],
                     patch_data["bids"][0]['lotValues'][0]['participationUrl'])
    self.assertEqual(auction["lots"][0]['auctionUrl'], patch_data["lots"][0]['auctionUrl'])
    # Cancel the first lot; URL updates for it must then be rejected.
    response = self.app.post_json('/auctions/{}/cancellations?acc_token={}'.format(
        self.auction_id, self.auction_token
    ), {'data': {
        'reason': 'cancellation reason',
        'status': 'active',
        "cancellationOf": "lot",
        "relatedLot": self.initial_lots[0]['id']
    }})
    self.assertEqual(response.status, '201 Created')
    self.app.authorization = ('Basic', ('auction', ''))
    response = self.app.patch_json('/auctions/{}/auction/{}'.format(self.auction_id, self.initial_lots[0]['id']),
                                   {'data': patch_data}, status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"], "Can update auction urls only in active lot status")
def post_auction_auction_document_2_lots(self):
    """Document upload by the 'auction' user for a two-lot auction.

    Uploads are forbidden while tendering, allowed during active.auction
    (including re-upload after results are posted), and forbidden again
    once the auction is complete.
    """
    self.app.authorization = ('Basic', ('auction', ''))
    response = self.app.post('/auctions/{}/documents'.format(self.auction_id),
                             upload_files=[('file', 'name.doc', 'content')], status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't add document in current (active.tendering) auction status")
    self.set_status('active.auction')
    response = self.app.post('/auctions/{}/documents'.format(self.auction_id),
                             upload_files=[('file', 'name.doc', 'content')])
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    doc_id = response.json["data"]['id']
    # Download key is the last query-string parameter of the document URL.
    key = response.json["data"]["url"].split('?')[-1].split('=')[-1]
    # Relate the document to the first lot.
    response = self.app.patch_json('/auctions/{}/documents/{}'.format(self.auction_id, doc_id),
                                   {'data': {"documentOf": "lot", 'relatedItem': self.initial_lots[0]['id']}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json["data"]["documentOf"], "lot")
    self.assertEqual(response.json["data"]["relatedItem"], self.initial_lots[0]['id'])
    # Post auction results (one lotValue per lot for each bid).
    patch_data = {
        'bids': [
            {
                "id": self.initial_bids[1]['id'],
                'lotValues': [
                    {
                        "value": {
                            "amount": 409,
                            "currency": "UAH",
                            "valueAddedTaxIncluded": True
                        }
                    }
                    for _ in self.initial_lots
                ]
            },
            {
                'id': self.initial_bids[0]['id'],
                'lotValues': [
                    {
                        "value": {
                            "amount": 419,
                            "currency": "UAH",
                            "valueAddedTaxIncluded": True
                        }
                    }
                    for i in self.initial_lots
                ]
            }
        ]
    }
    response = self.app.post_json('/auctions/{}/auction'.format(self.auction_id), {'data': patch_data})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    # Re-uploading the same document produces a new version with a new key.
    response = self.app.put('/auctions/{}/documents/{}'.format(self.auction_id, doc_id),
                            upload_files=[('file', 'name.doc', 'content_with_names')])
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(doc_id, response.json["data"]["id"])
    key2 = response.json["data"]["url"].split('?')[-1].split('=')[-1]
    self.assertNotEqual(key, key2)
    self.set_status('complete')
    response = self.app.post('/auctions/{}/documents'.format(self.auction_id),
                             upload_files=[('file', 'name.doc', 'content')], status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['errors'][0]["description"],
                     "Can't add document in current (complete) auction status")
# AuctionFeaturesAuctionResourceTest
def get_auction_features_auction(self):
    """GET /auctions/{id}/auction for an auction with features.

    The auction view must expose features and per-bid parameters while
    still hiding procuringEntity and bidder identities.
    """
    self.app.authorization = ('Basic', ('auction', ''))
    response = self.app.get('/auctions/{}/auction'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    auction = response.json['data']
    self.assertNotEqual(auction, self.initial_data)
    self.assertIn('dateModified', auction)
    self.assertIn('minimalStep', auction)
    # Sensitive participant data must not leak through the auction view.
    self.assertNotIn("procuringEntity", auction)
    self.assertNotIn("tenderers", auction["bids"][0])
    self.assertEqual(auction["bids"][0]['value']['amount'], self.initial_bids[0]['value']['amount'])
    self.assertEqual(auction["bids"][1]['value']['amount'], self.initial_bids[1]['value']['amount'])
    # Feature definitions and each bid's feature parameters are exposed.
    self.assertIn('features', auction)
    self.assertIn('parameters', auction["bids"][0])
def koatuu_additional_classification(self):
    """Validation of KOATUU additional classification ids on items.

    Ids starting with accepted region prefixes ('01', '14') create fine;
    others ('11', '75') are rejected with 422.
    """
    input_classification = [{"scheme": "koatuu",
                             "id": "0110136600",
                             "description": "test"}]
    initial_data = deepcopy(self.initial_data)
    initial_data['items'][0]['additionalClassifications'] = input_classification
    auction = self.create_auction_unit(data=initial_data)
    # The classification must round-trip unchanged.
    output_classification = auction['data']['items'][0]['additionalClassifications']
    self.assertEqual(input_classification, output_classification)
    # input_classification is referenced by initial_data, so mutating it
    # in place changes the payload of each subsequent create call.
    input_classification[0]['id'] = '1421580802'
    response = self.create_auction_unit(data=initial_data, status=201)
    input_classification[0]['id'] = '1110136600'
    response = self.create_auction_unit(data=initial_data, status=422)
    self.assertEqual(response['status'], 'error')
    input_classification[0]['id'] = '7510136600'
    response = self.create_auction_unit(data=initial_data, status=422)
    self.assertEqual(response['status'], 'error')
def patch_auction_in_rectificationPeriod(test_case):
    """While rectificationPeriod is open, the owner can patch the auction."""
    auction_url = "/auctions/{0}".format(test_case.auction_id)
    title_before = test_case.app.get(auction_url).json['data']['title']
    patch_url = "/auctions/{0}?acc_token={1}".format(
        test_case.auction_id,
        test_case.auction_token,
    )
    test_case.app.patch_json(patch_url, {'data': {'title': 'lol'}})
    title_after = test_case.app.get(auction_url).json['data']['title']
    # The patch must have been applied.
    assert title_before != title_after
    assert title_after == 'lol'
def patch_auction_after_rectificationPeriod(test_case):
    """After rectificationPeriod has expired, owner patches are ignored."""
    target_attr = 'tenderAttempts'
    target_value = 5
    auction_url = "/auctions/{0}".format(test_case.auction_id)
    value_before = test_case.app.get(auction_url).json['data'][target_attr]
    # Forge an already-expired rectificationPeriod directly in the DB.
    forged_doc = test_case.db[test_case.auction_id]
    forged_doc.update({
        'rectificationPeriod': {
            'startDate': (get_now() - timedelta(days=7)).isoformat(),
            'endDate': (get_now() - timedelta(days=5)).isoformat(),
        }
    })
    test_case.db[test_case.auction_id] = forged_doc
    test_case.app.patch_json(
        "/auctions/{0}?acc_token={1}".format(
            test_case.auction_id,
            test_case.auction_token,
        ),
        {'data': {target_attr: target_value}},
    )
    value_after = test_case.app.get(auction_url).json['data'][target_attr]
    # The attribute must be unchanged.
    assert value_before == value_after
| 47.957617 | 120 | 0.632551 | 4,635 | 41,867 | 5.592017 | 0.046818 | 0.126162 | 0.162391 | 0.057178 | 0.921216 | 0.915815 | 0.903198 | 0.891161 | 0.883059 | 0.878043 | 0 | 0.021307 | 0.19624 | 41,867 | 872 | 121 | 48.012615 | 0.748923 | 0.009244 | 0 | 0.739191 | 0 | 0 | 0.275266 | 0.026912 | 0 | 0 | 0 | 0 | 0.35007 | 1 | 0.025105 | false | 0 | 0.004184 | 0 | 0.029289 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5f08547c4f8eb0b145b434483a6050cf2b103671 | 39,410 | py | Python | CTFd/initial.py | RealityAbb/panSky | fadf7063094f809f679d0bcaafbd161054b6b63b | [
"Apache-2.0"
] | null | null | null | CTFd/initial.py | RealityAbb/panSky | fadf7063094f809f679d0bcaafbd161054b6b63b | [
"Apache-2.0"
] | null | null | null | CTFd/initial.py | RealityAbb/panSky | fadf7063094f809f679d0bcaafbd161054b6b63b | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# coding=utf-8
from flask_sqlalchemy import SQLAlchemy
from CTFd.models import db, LMRoute, DLeadMachineCert,Certificates
#from CTFd import models
from socket import inet_aton, inet_ntoa
from struct import unpack, pack
from struct import *
from time import ctime,sleep
from os import system
from flask import current_app as app
import os
import socket
import struct
import ctypes
import datetime
#def IPHeader():
import thread, time
import Transport
from generalfunction import GenerateSN,GeneratePacketHeader,Confirm
def LMGeneratePacketHeader(target = 0):
    """Build the fixed 128-byte packet header.

    The header is all zeroes except for the target id stored as a single
    unsigned byte at offset 5.
    """
    header = bytearray(128)
    struct.pack_into('B', header, 5, target)
    return bytes(header)
#class Hello():
class CGenerateDeviceCert():
    """Command 3: ask the device to generate a device certificate.

    ``parameters`` supplies the certificate subject fields in order:
    [country, province, city, organ, depart, name, email].
    """
    def __init__(self, parameters, target = 0):
        self.sn = GenerateSN()  # request serial; the reply must carry sn + 1
        self.target = target
        self.FunCode = 253
        self.Param1 = 253
        self.Param2 = 268  # content length field
        self.Command_Code = 3
        self.reserved = 0
        self.serial = 0
        self.country = parameters[0]
        self.province = parameters[1]
        self.city = parameters[2]
        self.organ = parameters[3]
        self.depart = parameters[4]
        self.name = parameters[5]
        self.email = parameters[6]
        self.flag = 128  # 0x80, trailing flag byte

    def PackContent(self):
        """Pack the 288-byte command body with NUL-padded fixed-width fields."""
        buf = ctypes.create_string_buffer(288)
        # Pad/truncate each subject field to its fixed wire width.
        country = (self.country + '\x00' * 4)[:4]
        province = (self.province + '\x00' * 32)[:32]
        city = (self.city + '\x00' * 32)[:32]
        organ = (self.organ + '\x00' * 64)[:64]
        depart = (self.depart + '\x00' * 32)[:32]
        name = (self.name + '\x00' * 32)[:32]
        email = (self.email + '\x00' * 64)[:64]
        # Layout: header @0, serial/country/province @8, city..name @48,
        # email @208, flag @272.
        struct.pack_into('!BBHB', buf, 0, self.FunCode, self.Param1, self.Param2, self.Command_Code)
        struct.pack_into('!L4s32s',buf,8, self.serial, country, province)
        struct.pack_into('32s64s32s32s', buf, 48, city, organ, depart, name)
        struct.pack_into('64s',buf,208, email)
        struct.pack_into('B',buf, 272, self.flag)
        return buf.raw

    def PackPacket(self):
        """Assemble serial + packet header + body + confirmation trailer."""
        snh = struct.pack("!L", self.sn)
        PacketHeader = LMGeneratePacketHeader(self.target)
        PacketContent = self.PackContent()
        confirm = Confirm()
        packet_send = snh + PacketHeader + PacketContent + confirm
        return packet_send

    def ParsePacket(self, packet_receive):
        """Validate the raw reply; return the device status or None if bad."""
        ip_header = packet_receive[0:20]
        ip_protocol = unpack('!B',ip_header[9])[0]
        if ip_protocol != 254:  # custom IP protocol number used by the device
            return None
        snh = packet_receive[20:24]
        sn = unpack('!L', snh)[0]
        print 'self.sn = ', self.sn
        print 'sn = ', sn
        if sn != self.sn + 1:  # reply serial must be request serial + 1
            return None
        content_receive_pack = packet_receive[152:156]
        content_receive = unpack('BBH' , content_receive_pack)
        FunCode = content_receive[0]
        if FunCode != 253:
            return None
        Param = content_receive[1]
        Length = content_receive[2] - 2
        content_receive_data_head_pack = packet_receive[158:162]
        content_receive_data_head = unpack('!BBH',content_receive_data_head_pack)
        Return_Code = content_receive_data_head[0]
        if Return_Code != self.Command_Code + 1:  # reply code is command + 1
            return None
        Status = content_receive_data_head[1]
        return Status

    def SendAndReceive(self):
        """Send the request.

        Returns -2 on transport timeout, -1 on a malformed reply,
        otherwise the device status code (0 means success).
        """
        packet_send = self.PackPacket()
        packet_receive = Transport.SocketTransport(packet_send, '0.0.0.0', self.sn)
        if packet_receive == None:
            return -2
        status = self.ParsePacket(packet_receive)
        if status == None:
            return -1
        else:
            return status
class ExportCert():
    """Command 21: export the device certificate bundle.

    On success the returned payload is written to
    CERTIFICATE_FOLDER/premachine.tar.gz.
    """
    def __init__(self, target = 0):
        self.dest_host = "0.0.0.0"
        self.target = target
        self.sn = GenerateSN()  # request serial; the reply must carry sn + 1
        self.FunCode = 253
        self.Param1 = 253
        self.Param2 = 1  # content length field
        self.Command_Code = 21
        self.flag = 128  # 0x80, trailing flag byte

    def PackContent(self):
        """Pack the 16-byte command body."""
        buf = ctypes.create_string_buffer(16)
        struct.pack_into('!BBHBB', buf, 0, self.FunCode, self.Param1, self.Param2, self.Command_Code, self.flag)
        return buf.raw

    def PackPacket(self):
        """Assemble serial + packet header + body + confirmation trailer."""
        snh = struct.pack("!L", self.sn)
        PacketHeader = LMGeneratePacketHeader(self.target)
        PacketContent = self.PackContent()
        confirm = Confirm()
        packet_send = snh + PacketHeader + PacketContent + confirm
        return packet_send

    def ParsePacket(self, packet_receive):
        """Validate the reply; on status 0 write the exported archive to disk.

        Returns the device status code, or None if the reply is malformed
        or does not match this request.
        """
        ip_header = packet_receive[0:20]
        ip_protocol = unpack('!B',ip_header[9])[0]
        if ip_protocol != 254:  # custom IP protocol number used by the device
            return None
        snh = packet_receive[20:24]
        sn = unpack('!L', snh)[0]
        if sn != self.sn + 1:  # reply serial must be request serial + 1
            return None
        content_receive_pack = packet_receive[152:156]
        content_receive = unpack('BBH' , content_receive_pack)
        FunCode = content_receive[0]
        if FunCode != 253:
            return None
        Param = content_receive[1]
        Length = content_receive[2] - 2
        content_receive_data_head_pack = packet_receive[158:162]
        content_receive_data_head = unpack('!BBH',content_receive_data_head_pack)
        Return_Code = content_receive_data_head[0]
        if Return_Code != self.Command_Code + 1:  # reply code is command + 1
            return None
        Status = content_receive_data_head[1]
        if Status == 0:
            # Length-prefixed payload: big-endian u16 length, then the data.
            content_receive_data_length = unpack('!H',packet_receive[162:164])[0]
            s = str(content_receive_data_length) + 's'
            content_receive_data_pack = packet_receive[164 : 164 + content_receive_data_length]
            content_receive_data = unpack(s,content_receive_data_pack)[0]
            path = os.path.join(app.config['CERTIFICATE_FOLDER'], 'premachine.tar.gz')
            with open(path,"w") as f:
                f.write((content_receive_data))
        return Status

    def SendAndRecieve(self):
        """Send the request (name kept misspelled for existing callers).

        Returns -2 on transport timeout, -1 on a malformed reply,
        otherwise the device status code (0 means success).
        """
        packet_send = self.PackPacket()
        packet_receive = Transport.SocketTransport(packet_send, self.dest_host, self.sn)
        if packet_receive == None:  ## time out
            return -2
        status = self.ParsePacket(packet_receive)
        if status == None:
            return -1
        else:
            return status

    # Correctly spelled alias: every sibling class exposes SendAndReceive;
    # the misspelled original is kept above for backward compatibility.
    SendAndReceive = SendAndRecieve
class InitialUkey():
    """Command 110: initialise a USB key and fetch its certificate.

    ``parameters`` supplies, in order: [country, province, city, organ,
    depart, name, email, pk, id].  On success the returned certificate is
    written to CERTIFICATE_FOLDER/<id>.pem.
    """
    def __init__(self,parameters,target = 0):
        self.dest_host = '0.0.0.0'
        self.target = target
        self.sn = GenerateSN()  # request serial; the reply must carry sn + 1
        self.FunCode = 253
        self.Param1 = 253
        self.Param2 = 332  # content length field
        self.Command_Code = 110
        self.reserved = 0
        self.pk = parameters[7]  # public key blob, padded to 64 bytes on the wire
        self.serial = 0
        self.country = parameters[0]
        self.province = parameters[1]
        self.city = parameters[2]
        self.organ = parameters[3]
        self.depart = parameters[4]
        self.name = parameters[5]
        self.email = parameters[6]
        self.flag = 128  # 0x80, trailing flag byte
        # Key id with any trailing NUL padding stripped; used as the .pem filename.
        self.id = parameters[8].split("\x00")[0]

    def PackContent(self):
        """Pack the 352-byte command body with NUL-padded fixed-width fields."""
        buf = ctypes.create_string_buffer(352)
        country = (self.country + '\x00' * 4)[:4]
        province = (self.province + '\x00' * 32)[:32]
        city = (self.city + '\x00' * 32)[:32]
        organ = (self.organ + '\x00' * 64)[:64]
        depart = (self.depart + '\x00' * 32)[:32]
        name = (self.name + '\x00' * 32)[:32]
        email = (self.email + '\x00' * 64)[:64]
        pk = (self.pk + '\x00' * 64)[:64]
        # Layout: header @0, pk @8, serial/country/province @72,
        # city..name @112, email @272, flag @336.
        struct.pack_into('!BBHB', buf, 0, self.FunCode, self.Param1, self.Param2, self.Command_Code)
        struct.pack_into('64s',buf,8,pk)
        struct.pack_into('!L4s32s',buf,72, self.serial, country, province)
        struct.pack_into('32s64s32s32s', buf, 112, city, organ, depart, name)
        struct.pack_into('64s',buf,272, email)
        struct.pack_into('B',buf, 336, self.flag)
        return buf.raw

    def PackPacket(self):
        """Assemble serial + packet header + body + confirmation trailer."""
        snh = struct.pack("!L", self.sn)
        PacketHeader = LMGeneratePacketHeader(self.target)
        PacketContent = self.PackContent()
        confirm = Confirm()
        packet_send = snh + PacketHeader + PacketContent + confirm
        return packet_send

    def ParsePacket(self, packet_receive):
        """Validate the reply; on status 0 write <id>.pem to disk.

        Returns the device status code, or None if the reply is malformed
        or does not match this request.
        """
        ip_header = packet_receive[0:20]
        ip_protocol = unpack('!B',ip_header[9])[0]
        if ip_protocol != 254:  # custom IP protocol number used by the device
            return None
        snh = packet_receive[20:24]
        sn = unpack('!L', snh)[0]
        if sn != self.sn + 1:  # reply serial must be request serial + 1
            return None
        content_receive_pack = packet_receive[152:156]
        content_receive = unpack('BBH' , content_receive_pack)
        FunCode = content_receive[0]
        if FunCode != 253:
            return None
        Param = content_receive[1]
        Length = content_receive[2] - 2
        content_receive_data_head_pack = packet_receive[158:162]
        content_receive_data_head = unpack('!BBH',content_receive_data_head_pack)
        Return_Code = content_receive_data_head[0]
        if Return_Code != self.Command_Code + 1:  # reply code is command + 1
            return None
        Status = content_receive_data_head[1]
        if Status == 0:
            # NOTE(review): native-endian 'i' length here, unlike the '!H'
            # used by ExportCert/ExportUKeyResp — confirm device byte order.
            content_receive_data_length = unpack('i',packet_receive[162:166])[0]
            s = str(content_receive_data_length) + 's'
            content_receive_data_pack = packet_receive[166 : 166 + content_receive_data_length]
            content_receive_data = unpack(s,content_receive_data_pack)[0]
            path = os.path.join(app.config['CERTIFICATE_FOLDER'], self.id + '.pem')
            with open(path,"w") as f:
                f.write((content_receive_data))
        return Status

    def SendAndReceive(self):
        """Send the request.

        Returns -2 on transport timeout, -1 on a malformed reply,
        otherwise the device status code (0 means success).
        """
        packet_send = self.PackPacket()
        packet_receive = Transport.SocketTransport(packet_send, self.dest_host, self.sn)
        if packet_receive == None:  ## time out
            return -2
        status = self.ParsePacket(packet_receive)
        if status == None:
            return -1
        else:
            return status
class ExportUKeyResp():
    """Command 53: export the USB-key certificate.

    On success the returned payload is written to
    static/uploads/usbkey.der.  Note the default ``target`` is 1 here,
    unlike most sibling commands.
    """
    def __init__(self, target = 1):
        self.dest_host = "0.0.0.0"
        self.target = target
        self.sn = GenerateSN()  # request serial; the reply must carry sn + 1
        self.FunCode = 253
        self.Param1 = 253
        self.Param2 = 1  # content length field
        self.Command_Code = 53
        self.flag = 128  # 0x80, trailing flag byte

    def PackContent(self):
        """Pack the 16-byte command body."""
        buf = ctypes.create_string_buffer(16)
        struct.pack_into('!BBHBB', buf, 0, self.FunCode, self.Param1, self.Param2, self.Command_Code, self.flag)
        return buf.raw

    def PackPacket(self):
        """Assemble serial + packet header + body + confirmation trailer."""
        snh = struct.pack("!L", self.sn)
        PacketHeader = LMGeneratePacketHeader(self.target)
        PacketContent = self.PackContent()
        confirm = Confirm()
        packet_send = snh + PacketHeader + PacketContent + confirm
        return packet_send

    def ParsePacket(self, packet_receive):
        """Validate the reply; on status 0 write usbkey.der to disk.

        Returns the device status code, or None if the reply is malformed
        or does not match this request.
        """
        # Too short to even contain the fixed reply layout.
        if len(packet_receive) < 156:
            return None
        ip_header = packet_receive[0:20]
        ip_protocol = unpack('!B',ip_header[9])[0]
        if ip_protocol != 254:  # custom IP protocol number used by the device
            return None
        snh = packet_receive[20:24]
        sn = unpack('!L', snh)[0]
        if sn != self.sn + 1:  # reply serial must be request serial + 1
            return None
        content_receive_pack = packet_receive[152:156]
        content_receive = unpack('BBH' , content_receive_pack)
        FunCode = content_receive[0]
        if FunCode != 253:
            return None
        Param = content_receive[1]
        Length = content_receive[2] - 2
        content_receive_data_head_pack = packet_receive[158:162]
        content_receive_data_head = unpack('!BBH',content_receive_data_head_pack)
        Return_Code = content_receive_data_head[0]
        if Return_Code != self.Command_Code + 1:  # reply code is command + 1
            return None
        Status = content_receive_data_head[1]
        if Status == 0:
            # Length-prefixed payload: big-endian u16 length, then the data.
            content_receive_data_length = unpack('!H',packet_receive[162:164])[0]
            s = str(content_receive_data_length) + 's'
            content_receive_data_pack = packet_receive[164 : 164 + content_receive_data_length]
            content_receive_data = unpack(s,content_receive_data_pack)[0]
            with open('static/uploads/usbkey.der',"w") as f:
                f.write((content_receive_data))
        return Status

    def SendAndReceive(self):
        """Send the request.

        Returns -2 on transport timeout, -1 on a malformed reply,
        otherwise the device status code (0 means success).
        """
        packet_send = self.PackPacket()
        packet_receive = Transport.SocketTransport(packet_send, self.dest_host, self.sn)
        if packet_receive == None:  ## time out
            return -2
        status = self.ParsePacket(packet_receive)
        if status == None:
            return -1
        else:
            return status
class ConfigPredeviceIP():
    """Command 106: push five interface IP/netmask pairs to the pre-device.

    ``parameters`` is a flat list of 10 dotted-quad strings:
    [addr1, mask1, addr2, mask2, ..., addr5, mask5].
    """
    def __init__(self, parameters, target = 1):
        # BUG FIX: the original accepted ``target`` but hard-coded
        # ``self.target = 1``, silently ignoring the argument (every
        # sibling class honours it).  The parameter is now honoured; the
        # default is changed to 1 so default behaviour is unchanged.
        self.dest_host = "0.0.0.0"
        self.target = target
        self.sn = GenerateSN()  # request serial; the reply must carry sn + 1
        self.FunCode = 253
        self.Param1 = 253
        self.Param2 = 44  # content length field
        self.Command_Code = 106
        self.reserved = 0
        # Addresses are stored already packed (inet_aton) for struct packing;
        # invalid dotted quads raise here rather than at send time.
        self.ipaddr1 = socket.inet_aton(parameters[0])
        self.ipmask1 = socket.inet_aton(parameters[1])
        self.ipaddr2 = socket.inet_aton(parameters[2])
        self.ipmask2 = socket.inet_aton(parameters[3])
        self.ipaddr3 = socket.inet_aton(parameters[4])
        self.ipmask3 = socket.inet_aton(parameters[5])
        self.ipaddr4 = socket.inet_aton(parameters[6])
        self.ipmask4 = socket.inet_aton(parameters[7])
        self.ipaddr5 = socket.inet_aton(parameters[8])
        self.ipmask5 = socket.inet_aton(parameters[9])
        self.flag = 128  # 0x80, trailing flag byte

    def PackContent(self):
        """Pack the 64-byte command body: header, then 5 addr/mask pairs."""
        buf = ctypes.create_string_buffer(64)
        struct.pack_into('!BBHB', buf, 0, self.FunCode, self.Param1, self.Param2, self.Command_Code)
        struct.pack_into('4s4s4s4s4s4s4s4s4s4sB', buf, 8, self.ipaddr1,self.ipmask1, self.ipaddr2,self.ipmask2, self.ipaddr3,self.ipmask3, self.ipaddr4,self.ipmask4, self.ipaddr5,self.ipmask5, self.flag)
        return buf.raw

    def PackPacket(self):
        """Assemble serial + packet header + body + confirmation trailer."""
        snh = struct.pack("!L", self.sn)
        PacketHeader = LMGeneratePacketHeader(self.target)
        PacketContent = self.PackContent()
        confirm = Confirm()
        packet_send = snh + PacketHeader + PacketContent + confirm
        return packet_send

    def ParsePacket(self, packet_receive):
        """Validate the reply; return the device status or None if bad."""
        ip_header = packet_receive[0:20]
        ip_protocol = unpack('!B',ip_header[9])[0]
        if ip_protocol != 254:  # custom IP protocol number used by the device
            return None
        snh = packet_receive[20:24]
        sn = unpack('!L', snh)[0]
        if sn != self.sn + 1:  # reply serial must be request serial + 1
            return None
        content_receive_pack = packet_receive[152:156]
        content_receive = unpack('BBH' , content_receive_pack)
        FunCode = content_receive[0]
        if FunCode != self.FunCode:
            return None
        Param = content_receive[1]
        Length = content_receive[2] - 2
        content_receive_data_head_pack = packet_receive[158:162]
        content_receive_data_head = unpack('!BBH',content_receive_data_head_pack)
        Return_Code = content_receive_data_head[0]
        if Return_Code != self.Command_Code + 1:  # reply code is command + 1
            return None
        Status = content_receive_data_head[1]
        return Status

    def SendAndReceive(self):
        """Send the request (with a 6-second transport argument).

        Returns -2 on transport timeout, -1 on a malformed reply,
        otherwise the device status code (0 means success).
        """
        packet_send = self.PackPacket()
        packet_receive = Transport.SocketTransport(packet_send, self.dest_host, self.sn, 6)
        if packet_receive == None:  ## time out
            return -2
        status = self.ParsePacket(packet_receive)
        if status == None:
            return -1
        else:
            return status
class ConfigPredeviceRoute():
    """Command 107: add or remove a static route on the pre-device.

    ``parameters`` supplies, in order: [operation, keyword, IPAddr, Mask,
    Gateway, interface].  IPAddr/Mask/Gateway are strings; operation,
    keyword and interface are packed as single bytes.
    """
    def __init__(self, parameters, target = 0):
        self.dest_host = "0.0.0.0"
        self.target = target
        self.sn = GenerateSN()  # request serial; the reply must carry sn + 1
        self.FunCode = 253
        self.Param1 = 253
        self.Param2 = 99  # content length field
        self.Command_Code = 107
        self.operation = parameters[0]
        self.keyword = parameters[1]
        self.IPAddr = parameters[2]
        self.Mask = parameters[3]
        self.Gateway = parameters[4]
        self.interface = parameters[5]
        self.flag = 128  # 0x80, trailing flag byte

    def PackContent(self):
        """Pack the 112-byte command body.

        Layout: header/operation/keyword @0, IPAddr @7, Mask @39,
        Gateway @71, interface @103, flag @104.
        """
        buf = ctypes.create_string_buffer(112)
        struct.pack_into('!BBHBBB', buf, 0, self.FunCode, self.Param1, self.Param2, self.Command_Code, self.operation,self.keyword)
        struct.pack_into('!'+ str(len(self.IPAddr))+'s', buf, 7, self.IPAddr)
        struct.pack_into('!'+ str(len(self.Mask)) + 's', buf, 39, self.Mask)
        struct.pack_into('!'+ str(len(self.Gateway)) + 's', buf, 71, self.Gateway)
        struct.pack_into('B',buf,103,self.interface)
        struct.pack_into('!B',buf, 104, self.flag)
        return buf.raw

    def PackPacket(self):
        """Assemble serial + packet header + body + confirmation trailer."""
        snh = struct.pack("!L", self.sn)
        PacketHeader = LMGeneratePacketHeader(self.target)
        PacketContent = self.PackContent()
        confirm = Confirm()
        packet_send = snh + PacketHeader + PacketContent + confirm
        return packet_send

    def ParsePacket(self, packet_receive):
        """Validate the reply; return the device status or None if bad."""
        ip_header = packet_receive[0:20]
        ip_protocol = unpack('!B',ip_header[9])[0]
        if ip_protocol != 254:  # custom IP protocol number used by the device
            return None
        snh = packet_receive[20:24]
        sn = unpack('!L', snh)[0]
        if sn != self.sn + 1:  # reply serial must be request serial + 1
            return None
        content_receive_pack = packet_receive[152:156]
        content_receive = unpack('BBH' , content_receive_pack)
        FunCode = content_receive[0]
        if FunCode != self.FunCode:
            return None
        Param = content_receive[1]
        Length = content_receive[2] - 2
        content_receive_data_head_pack = packet_receive[158:162]
        content_receive_data_head = unpack('!BBH',content_receive_data_head_pack)
        Return_Code = content_receive_data_head[0]
        if Return_Code != self.Command_Code + 1:  # reply code is command + 1
            return None
        Status = content_receive_data_head[1]
        print 'Status = ',Status
        return Status

    def SendAndReceive(self):
        """Send the request.

        Returns -2 on transport timeout, -1 on a malformed reply,
        otherwise the device status code (0 means success).
        """
        packet_send = self.PackPacket()
        packet_receive = Transport.SocketTransport(packet_send, self.dest_host, self.sn)
        if packet_receive == None:  ## time out
            return -2
        status = self.ParsePacket(packet_receive)
        if status == None:
            return -1
        else:
            return status
class QueryPredeviceIP():
    """Command 108: query the five interface IP/netmask pairs.

    SendAndReceive returns ``[status, [addr1..addr5], [mask1..mask5]]``,
    or ``[-2, [], []]`` on timeout / ``[-1, [], []]`` on a bad reply.
    """
    def __init__(self,target = 0):
        self.dest_host = '0.0.0.0'
        self.target = target
        self.sn = GenerateSN()  # request serial; the reply must carry sn + 1
        self.FunCode = 253
        self.Param1 = 253
        self.Param2 = 1  # content length field
        self.Command_Code = 108
        self.flag = 128  # 0x80, trailing flag byte

    def PackContent(self):
        """Pack the 16-byte command body."""
        buf = ctypes.create_string_buffer(16)
        struct.pack_into('!BBHBB', buf, 0, self.FunCode, self.Param1, self.Param2, self.Command_Code, self.flag)
        return buf.raw

    def PackPacket(self):
        """Assemble serial + packet header + body + confirmation trailer."""
        snh = struct.pack("!L", self.sn)
        PacketHeader = LMGeneratePacketHeader(self.target)
        PacketContent = self.PackContent()
        confirm = Confirm()
        packet_send = snh + PacketHeader + PacketContent + confirm
        return packet_send

    def ParsePacket(self, packet_receive):
        """Validate the reply and decode the 5 addr/mask pairs.

        Returns ``[status, addrs, masks]`` or None on a malformed reply.
        """
        ip_header = packet_receive[0:20]
        ip_protocol = unpack('!B',ip_header[9])[0]
        if ip_protocol != 254:  # custom IP protocol number used by the device
            return None
        snh = packet_receive[20:24]
        sn = unpack('!L', snh)[0]
        if sn != self.sn + 1:  # reply serial must be request serial + 1
            return None
        content_receive_pack = packet_receive[152:156]
        content_receive = unpack('BBH' , content_receive_pack)
        FunCode = content_receive[0]
        if FunCode != self.FunCode:
            return None
        Param = content_receive[1]
        Length = content_receive[2] - 2
        content_receive_data_head_pack = packet_receive[158:162]
        content_receive_data_head = unpack('!BBH',content_receive_data_head_pack)
        Return_Code = content_receive_data_head[0]
        if Return_Code != self.Command_Code + 1:  # reply code is command + 1
            return None
        Status = content_receive_data_head[1]
        # 10 packed 4-byte values: addr/mask for each of the 5 interfaces.
        ips = unpack('4s4s4s4s4s4s4s4s4s4s',packet_receive[162:202])
        ipaddr1 = socket.inet_ntoa(ips[0])
        ipmask1 = socket.inet_ntoa(ips[1])
        ipaddr2 = socket.inet_ntoa(ips[2])
        ipmask2 = socket.inet_ntoa(ips[3])
        ipaddr3 = socket.inet_ntoa(ips[4])
        ipmask3 = socket.inet_ntoa(ips[5])
        ipaddr4 = socket.inet_ntoa(ips[6])
        ipmask4 = socket.inet_ntoa(ips[7])
        ipaddr5 = socket.inet_ntoa(ips[8])
        ipmask5 = socket.inet_ntoa(ips[9])
        return [Status,[ipaddr1,ipaddr2,ipaddr3,ipaddr4,ipaddr5],[ipmask1,ipmask2,ipmask3,ipmask4,ipmask5]]

    def SendAndReceive(self):
        """Send the request and return ``[status, addrs, masks]``.

        Error sentinels keep the same 3-element list shape for callers.
        """
        packet_send = self.PackPacket()
        packet_receive = Transport.SocketTransport(packet_send, self.dest_host, self.sn)
        if packet_receive == None:  ## time out
            return [-2,[],[]]
        status = self.ParsePacket(packet_receive)
        if status == None:
            return [-1,[],[]]
        else:
            return status
class QueryPredeviceRoute():
    """Command 109: query the pre-device routing table.

    On success the LMRoute table in the database is cleared and refilled
    with the routes decoded from the reply.
    """
    def __init__(self, target = 1):
        self.dest_host = "0.0.0.0"
        self.target = target
        self.sn = GenerateSN()  # request serial; the reply must carry sn + 1
        self.FunCode = 253
        self.Param1 = 253
        self.Param2 = 1  # content length field
        self.Command_Code = 109
        self.flag = 128  # 0x80, trailing flag byte

    def PackContent(self):
        """Pack the 16-byte command body."""
        buf = ctypes.create_string_buffer(16)
        struct.pack_into('!BBHBB', buf, 0, self.FunCode, self.Param1, self.Param2, self.Command_Code, self.flag)
        return buf.raw

    def PackPacket(self):
        """Assemble serial + packet header + body + confirmation trailer."""
        snh = struct.pack("!L", self.sn)
        PacketHeader = LMGeneratePacketHeader(self.target)
        PacketContent = self.PackContent()
        confirm = Confirm()
        packet_send = snh + PacketHeader + PacketContent + confirm
        return packet_send

    def ParsePacket(self, packet_receive):
        """Validate the reply; on status 0 rebuild the LMRoute table.

        Returns the device status code, or None if the reply is malformed
        or does not match this request.
        """
        # Too short to contain a useful route reply.
        if len(packet_receive) < 254:
            return None
        ip_header = packet_receive[0:20]
        ip_protocol = unpack('!B',ip_header[9])[0]
        if ip_protocol != 254:  # custom IP protocol number used by the device
            return None
        snh = packet_receive[20:24]
        sn = unpack('!L', snh)[0]
        if sn != self.sn + 1:  # reply serial must be request serial + 1
            return None
        content_receive_pack = packet_receive[152:156]
        content_receive = unpack('BBH' , content_receive_pack)
        FunCode = content_receive[0]
        if FunCode != self.FunCode:
            return None
        Param = content_receive[1]
        Length = content_receive[2] - 2
        content_receive_data_head_pack = packet_receive[158:162]
        content_receive_data_head = unpack('!BBH',content_receive_data_head_pack)
        Return_Code = content_receive_data_head[0]
        if Return_Code != self.Command_Code + 1:  # reply code is command + 1
            return None
        Status = content_receive_data_head[1]
        if Status == 0:
            # Replace the whole stored route table with the fresh snapshot.
            routes = LMRoute.query.all()
            for route in routes:
                db.session.delete(route)
                db.session.commit()
            route_num = unpack('i',packet_receive[162:166])[0]
            route_table = packet_receive[166 : 166 + route_num * 60]
            for index in range(route_num):
                # Each route entry is 60 bytes: i32 type + three 16-byte
                # NUL-padded strings + 8-byte interface name.
                route_receive_pack = route_table[60 * index : 60 + 60 * index]
                route_receive = unpack('i16s16s16s8s', route_receive_pack)
                route_ipaddr = route_receive[1].strip('\x00').split('\x00')[0]
                netmask = route_receive[2].strip('\x00').split('\x00')[0]
                gateway = route_receive[3].strip('\x00').split('\x00')[0]
                type_dict = ["网络路由","主机路由","默认路由"]
                try:
                    routetype = type_dict[route_receive[0]]
                except IndexError:
                    # BUG FIX: was a bare ``except:`` — only an out-of-range
                    # type code is expected here; anything else should surface.
                    routetype = "未知"
                interface = route_receive[4].strip('\x00').split('\x00')[0]
                newrecord = LMRoute(route_ipaddr, netmask, gateway,routetype,interface)
                db.session.add(newrecord)
                db.session.commit()
        return Status

    def SendAndReceive(self):
        """Send the request.

        Returns -2 on transport timeout, -1 on a malformed reply,
        otherwise the device status code (0 means success).
        """
        packet_send = self.PackPacket()
        packet_receive = Transport.SocketTransport(packet_send, self.dest_host, self.sn)
        if packet_receive == None:  ## time out
            return -2
        status = self.ParsePacket(packet_receive)
        if status == None:
            return -1
        else:
            return status
class CImportCert():
    """Imports a certificate onto the device (command code 4).

    Args:
        parameters (list): [cert_type, peer_ip, cert_format, cert_length,
            cert_content] describing the certificate to import.
        target (int): index of the device the packet is addressed to.
    """

    def __init__(self, parameters, target=1):
        self.dest_host = "0.0.0.0"
        self.sn = GenerateSN()
        self.target = target
        # command constants
        self.FunCode = 253
        self.Param1 = 253
        self.Param2 = 2037
        self.Command_Code = 4
        # certificate description supplied by the caller
        self.Cert_Type = parameters[0]
        self.Peer_Ip = (parameters[1] + '\x00' * 32)[:32]  # NUL-pad/truncate to 32 bytes
        self.Cert_Format = parameters[2]
        self.Cert_Length = parameters[3]
        self.Cert_Content = parameters[4]
        self.flag = 128

    def PackContent(self):
        """Serializes the import command into a 2048-byte payload."""
        padded = (self.Cert_Content + '\x00' * 2000)[:2000]  # NUL-pad/truncate to 2000 bytes
        buf = ctypes.create_string_buffer(2048)
        struct.pack_into('!BBHBB32sB', buf, 0,
                         self.FunCode, self.Param1, self.Param2,
                         self.Command_Code, self.Cert_Type,
                         self.Peer_Ip, self.Cert_Format)
        struct.pack_into('H2000sB', buf, 39, self.Cert_Length, padded, self.flag)
        return buf.raw

    def PackPacket(self):
        """Returns serial number + header + payload + confirm block."""
        return (struct.pack("!L", self.sn)
                + LMGeneratePacketHeader(self.target)
                + self.PackContent()
                + Confirm())

    def ParsePacket(self, packet_receive):
        """Validates the reply; returns the status byte, or None if any
        protocol check fails."""
        header = packet_receive[0:20]
        if unpack('!B', header[9])[0] != 254:
            return None
        if unpack('!L', packet_receive[20:24])[0] != self.sn + 1:
            return None
        fun_code = unpack('BBH', packet_receive[152:156])[0]
        if fun_code != self.FunCode:
            return None
        data_head = unpack('!BBH', packet_receive[158:162])
        if data_head[0] != self.Command_Code + 1:  # response code = command + 1
            return None
        return data_head[1]

    def SendAndReceive(self):
        """Sends the import request; -2 on timeout, -1 on a malformed reply,
        otherwise the device status byte."""
        reply = Transport.SocketTransport(self.PackPacket(), self.dest_host, self.sn)
        if reply is None:  # time out
            return -2
        parsed = self.ParsePacket(reply)
        return -1 if parsed is None else parsed
#1.2.7 Delete certificate ("shan chu zheng shu")
class CDeleteCert():
    """Deletes a certificate from the device (command code 12) and, on
    success, removes the matching DPrivateCertInfo row from the database.
    """
    def __init__(self, parameters, target = 1 ):
        # NOTE(review): `id` here is the Python builtin function (no id is
        # passed in), so self.id is never a real record id and the
        # filter_by(id=self.id) lookup in ParsePacket likely never matches.
        # Confirm what id was intended.
        self.id = id
        self.dest_host = "0.0.0.0"
        self.target = target
        self.sn = GenerateSN()
        # data of packet
        self.FunCode = 253
        self.Param1 = 253
        self.Param2 = 33 # data length
        self.Command_Code = 12 #uchar
        self.IpAddr = parameters[0] #uchar[32], cert ip address
        self.flag = 128
    def PackContent(self):
        """Builds the 48-byte delete-certificate payload."""
        buf = ctypes.create_string_buffer(48) ###change the size
        # NUL-pad/truncate the certificate IP to the fixed 32-byte field
        IpAddr = (self.IpAddr + '\x00' * 32)[:32].encode('utf-8')
        print self.IpAddr,"&&&&&&&&&&&&&&&&&"
        print type(IpAddr)
        # NOTE(review): struct.pack_into packs in place and returns None,
        # so `content` is always None and never used.
        content = struct.pack_into('!BBHB32sB', buf, 0, self.FunCode, self.Param1, self.Param2, \
        self.Command_Code, IpAddr, self.flag)
        ############# End ##############
        return buf.raw
    def PackPacket(self):
        """Assembles serial number + header + payload + confirm block."""
        snh = struct.pack("!L", self.sn)
        PacketHeader = LMGeneratePacketHeader(self.target)
        PacketContent = self.PackContent()
        confirmh = Confirm()
        packet_send = snh + PacketHeader + PacketContent + confirmh
        return packet_send
    def ParsePacket(self, packet_receive):
        """Validates the reply; on status 0 deletes the matching database
        record.  Returns the status byte, or None if the reply is malformed.
        """
        print repr(packet_receive)
        ip_header = packet_receive[0:20]
        # the device answers with custom IP protocol number 254
        ip_protocol = unpack('!B',ip_header[9])[0]
        if ip_protocol != 254:
            return None
        snh = packet_receive[20:24]
        # reply serial must be request serial + 1
        sn = unpack('!L', snh)[0]
        if sn != self.sn + 1:
            return None
        content_receive_head_pack = packet_receive[152:156]
        content_receive_head = unpack('!BBH' , content_receive_head_pack)
        FunCode = content_receive_head[0]
        if FunCode != self.FunCode:
            return None
        Param = content_receive_head[1] #P=S
        Length = content_receive_head[2] #L=Command_Code
        # generic response: return code + status, directly after the head
        content_receive_general_resp = unpack('!BB' , packet_receive[156:158])
        Return_Code = content_receive_general_resp[0]
        Status = content_receive_general_resp[1]
        # response code = command code + 1
        if Return_Code != self.Command_Code + 1:
            return None
        if Status == 0:
            # device confirmed the delete; drop the local record as well
            record = DPrivateCertInfo.query.filter_by(id=self.id, cert_name=self.IpAddr).first()
            if record != None:
                db.session.delete(record)
                db.session.commit()
        return Status
    def SendAndReceive(self):
        """Sends the delete request.

        Returns the device status byte, -2 on timeout, -1 on a malformed
        reply.
        """
        packet_send = self.PackPacket()
        packet_receive = Transport.SocketTransport(packet_send, self.dest_host, self.sn)
        if packet_receive == None: ## time out
            return -2
        status = self.ParsePacket(packet_receive)
        if status == None:
            return -1
        else:
            return status
class CRestartMachine():
    """Asks the device to reboot (command code 24).

    Args:
        target (int): index of the device the packet is addressed to.
    """

    def __init__(self, target=1):
        self.dest_host = '0.0.0.0'
        self.sn = GenerateSN()
        self.target = target
        # command constants
        self.FunCode = 253
        self.Param1 = 253
        self.Param2 = 1
        self.Command_Code = 24
        self.flag = 128

    def PackContent(self):
        """Serializes the 16-byte restart command payload."""
        payload = ctypes.create_string_buffer(16)
        struct.pack_into('!BBHBB', payload, 0,
                         self.FunCode, self.Param1, self.Param2,
                         self.Command_Code, self.flag)
        return payload.raw

    def PackPacket(self):
        """Returns serial number + header + payload + confirm block."""
        return (struct.pack("!L", self.sn)
                + LMGeneratePacketHeader(self.target)
                + self.PackContent()
                + Confirm())

    def ParsePacket(self, packet_receive):
        """Extracts the status byte from the reply, or returns None when the
        reply fails the protocol checks."""
        header = packet_receive[0:20]
        if unpack('!B', header[9])[0] != 254:
            return None
        if unpack('!L', packet_receive[20:24])[0] != self.sn + 1:
            return None
        fun_code = unpack('BBH', packet_receive[152:156])[0]
        if fun_code != self.FunCode:
            return None
        data_head = unpack('!BBH', packet_receive[158:162])
        # note: unlike other commands the return code is not validated here
        return data_head[1]

    def SendAndReceive(self):
        """Sends the restart command; -2 on timeout, -1 on a malformed
        reply, otherwise the device status byte."""
        reply = Transport.SocketTransport(self.PackPacket(), self.dest_host, self.sn, 6)
        if reply is None:  # time out
            return -2
        parsed = self.ParsePacket(reply)
        return -1 if parsed is None else parsed
class CQueryCertList():
    """Queries the certificate file list from the device (command code 5)
    and synchronizes it into the Certificates database table.

    Args:
        target (int): index of the device the packet is addressed to.
    """

    def __init__(self, target=0):
        # NOTE(review): `id` here is the Python builtin function, not a
        # record id, so the filter_by(id=self.id) lookup below probably
        # never matches a real row id -- confirm what id was intended.
        self.id = id
        self.dest_host = "0.0.0.0"
        self.target = target
        self.sn = GenerateSN()
        # command constants
        self.FunCode = 253
        self.Param1 = 253
        self.Param2 = 2
        self.Command_Code = 5
        self.Cert_Type = 6
        self.flag = 128

    def PackContent(self):
        """Builds the 16-byte query payload."""
        buf = ctypes.create_string_buffer(16)
        # pack_into packs in place; its (None) return value is not used
        struct.pack_into('!BBHBBB', buf, 0, self.FunCode, self.Param1,
                         self.Param2, self.Command_Code, self.Cert_Type,
                         self.flag)
        return buf.raw

    def PackPacket(self):
        """Assembles serial number + header + payload + confirm block."""
        snh = struct.pack("!L", self.sn)
        PacketHeader = LMGeneratePacketHeader(self.target)
        PacketContent = self.PackContent()
        confirmh = Confirm()
        return snh + PacketHeader + PacketContent + confirmh

    def ParsePacket(self, packet_receive):
        """Validates the reply and reconciles the local Certificates table
        with the certificate file names reported by the device.

        Returns:
            int: the device status byte, or None if the reply is malformed.
        """
        ip_header = packet_receive[0:20]
        if unpack('!B', ip_header[9])[0] != 254:
            return None
        if unpack('!L', packet_receive[20:24])[0] != self.sn + 1:
            return None
        content_receive = unpack('BBH', packet_receive[152:156])
        if content_receive[0] != self.FunCode:
            return None
        content_receive_data_head = unpack('!BBH', packet_receive[158:162])
        # note: the return code (byte 0) is not validated for this command
        Status = content_receive_data_head[1]
        Cert_Number = unpack('!L', packet_receive[162:166])[0]
        if Status == 0:
            # index the existing rows by certificate name
            existing_by_name = {}
            for existrecord in Certificates.query.filter_by(id=self.id).all():
                existing_by_name[existrecord.cert_name] = existrecord
            leftover_names = list(existing_by_name.keys())
            for i in range(Cert_Number):
                # each entry is a fixed 30-byte, NUL-padded file name
                name_pack = packet_receive[166 + 30 * i : 196 + 30 * i]
                Cert_FileName = unpack('!30s', name_pack)[0].strip('\x00').split('\x00')[0]
                if Cert_FileName not in leftover_names:
                    db.session.add(Certificates(Cert_FileName))
                else:
                    leftover_names.remove(Cert_FileName)
            # rows whose certificate no longer exists on the device
            for cert_name in leftover_names:
                db.session.delete(existing_by_name[cert_name])
            db.session.commit()
        return Status

    def SendAndReceive(self):
        """Sends the query; -2 on timeout, -1 on a malformed reply,
        otherwise the device status byte."""
        packet_send = self.PackPacket()
        packet_receive = Transport.SocketTransport(packet_send, self.dest_host, self.sn)
        if packet_receive is None:
            return -2
        status = self.ParsePacket(packet_receive)
        if status is None:
            return -1
        else:
            return status
class CGetCertPK():
    """Sends a certificate to the device (command code 111) and receives the
    extracted public key in return.
    """
    def __init__(self,parameters,target = 0):
        # NOTE(review): `id` here is the Python builtin function (no id is
        # passed in) -- self.id is never a real record id; confirm intent.
        self.id = id
        self.dest_host = "0.0.0.0"
        self.target = target
        self.sn = GenerateSN()
        # command constants
        self.FunCode = 253
        self.Param1 = 253
        self.Param2 = 2008
        self.Command_Code = 111
        self.reserved = 0
        # certificate blob supplied by the caller: [length, content]
        self.Cert_length = parameters[0]
        self.Cert_Content = parameters[1]
        self.flag = 128
    def PackContent(self):
        """Builds the 2016-byte get-public-key payload."""
        buf = ctypes.create_string_buffer(2016)
        struct.pack_into('!BBH',buf,0,self.FunCode,self.Param1,self.Param2)
        struct.pack_into('!B',buf,4,self.Command_Code)
        # NOTE(review): native 'L' is 8 bytes on LP64 platforms, which would
        # overlap the content field packed at offset 12 below ('=L' or '!L'
        # would be a fixed 4 bytes) -- confirm on the deployment platform.
        struct.pack_into('L',buf,8,self.Cert_length)
        struct.pack_into(str(self.Cert_length) + 's', buf,12,self.Cert_Content)
        struct.pack_into('B',buf,2012,self.flag)
        return buf.raw
    def PackPacket(self):
        """Assembles serial number + header + payload + confirm block."""
        snh = struct.pack("!L", self.sn)
        PacketHeader = LMGeneratePacketHeader(self.target)
        PacketContent = self.PackContent()
        confirmh = Confirm()
        packet_send = snh + PacketHeader + PacketContent + confirmh
        return packet_send
    def ParsePacket(self, packet_receive):
        """Validates the reply and extracts the public key.

        Returns:
            list: [status, public_key] where public_key is a 64-byte field
            (empty string when status != 0), or None if the reply is
            malformed.
        """
        ip_header = packet_receive[0:20]
        # the device answers with custom IP protocol number 254
        ip_protocol = unpack('!B',ip_header[9])[0]
        if ip_protocol != 254:
            return None
        snh = packet_receive[20:24]
        # reply serial must be request serial + 1
        sn = unpack('!L', snh)[0]
        if sn != self.sn + 1:
            return None
        content_receive_pack = packet_receive[152:156]
        content_receive = unpack('BBH' , content_receive_pack)
        FunCode = content_receive[0]
        if FunCode != self.FunCode:
            return None
        Param = content_receive[1]
        Length = content_receive[2] - 2
        content_receive_data_head_pack = packet_receive[158:162]
        content_receive_data_head = unpack('!BBH',content_receive_data_head_pack)
        Return_Code = content_receive_data_head[0]
        # note: Return_Code is read but not validated for this command
        Status = content_receive_data_head[1]
        pk = ""
        if Status == 0:
            # the public key is a fixed 64-byte field right after the head
            pk = unpack('64s',packet_receive[162:162+64])[0]
            print 'pk = ',pk,"*&&*&*&*&*&*&*&*&*&*&*&&"
        return [Status,pk]
    def SendAndReceive(self):
        """Sends the request.

        Returns:
            list: [status, pk]; [-2, ""] on timeout, [-1, ""] on a
            malformed reply.
        """
        packet_send = self.PackPacket()
        packet_receive = Transport.SocketTransport(packet_send, self.dest_host, self.sn)
        if packet_receive == None: ## time out
            return [-2,""]
        status = self.ParsePacket(packet_receive)
        if status == None:
            return [-1,""]
        else:
            return status
| 38.11412 | 204 | 0.580081 | 4,533 | 39,410 | 4.846018 | 0.066402 | 0.103883 | 0.067192 | 0.06009 | 0.806938 | 0.791232 | 0.779169 | 0.767925 | 0.758957 | 0.74876 | 0 | 0.04836 | 0.316849 | 39,410 | 1,033 | 205 | 38.151016 | 0.767559 | 0.013677 | 0 | 0.776796 | 0 | 0 | 0.020586 | 0.001855 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.018785 | null | null | 0.01105 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
a08e0c684a6fe55913e2601bf307bb8730d6fdff | 4,698 | py | Python | points.py | windancert/line_triangle_projection | 01f4294c42431c7b5ba7610f8b3dffe701382ccb | [
"MIT"
] | null | null | null | points.py | windancert/line_triangle_projection | 01f4294c42431c7b5ba7610f8b3dffe701382ccb | [
"MIT"
] | null | null | null | points.py | windancert/line_triangle_projection | 01f4294c42431c7b5ba7610f8b3dffe701382ccb | [
"MIT"
] | null | null | null | # This Python file uses the following encoding: utf-8
import math
from abc import ABC, abstractmethod
from numbers import Number
class AbstractPoint(ABC):
    """Interface shared by the concrete point classes.

    Subclasses must expose their dimension, their list of coordinates and a
    dot product with another point of the same dimension.
    """
    @property
    @abstractmethod
    def dimension(self):
        """Number of coordinates of the point."""
        pass

    @property
    @abstractmethod
    def elements(self):
        """Coordinates of the point as a list."""
        pass

    @abstractmethod
    def dot(self, other):
        """Scalar (dot) product with *other*."""
        pass


class Point2D(AbstractPoint):
    """Immutable two-dimensional point/vector with elementwise arithmetic."""

    def __init__(self, x, y):
        super().__init__()
        self.__x = x
        self.__y = y

    @property
    def dimension(self):
        return 2

    @property
    def elements(self):
        """Coordinates as ``[x, y]``."""
        return [self.__x, self.__y]

    @property
    def x(self):
        return self.__x

    @property
    def y(self):
        return self.__y

    def normalize(self):
        """Return this vector scaled to unit length.

        Raises:
            ZeroDivisionError: for the zero vector.
        """
        s = math.sqrt(self.__x**2 + self.__y**2)
        result = self/s
        return result

    def dot(self, other):
        """Scalar product with *other* (anything exposing ``x`` and ``y``)."""
        return self.x * other.x + self.y * other.y

    def __add__(self, other):
        if not isinstance(other, Point2D):
            raise TypeError(f"Unable to add {type(other).__name__} to Point2D")
        return Point2D(self.__x + other.__x, self.__y + other.__y)

    def __sub__(self, other):
        if not isinstance(other, Point2D):
            raise TypeError(f"Unable to subtract {type(other).__name__} from Point2D")
        return Point2D(self.__x - other.__x, self.__y - other.__y)

    def __neg__(self):
        # implemented via subtraction from the origin constant below
        return Zero2D-self

    def __mul__(self, other):
        # scalar multiplication only; Point * Point is not defined
        if not isinstance(other, Number):
            raise TypeError(f"Unable to multiply {type(other).__name__} with Point2D")
        return Point2D(other * self.__x, other * self.__y)

    __rmul__ = __mul__

    def __truediv__(self, other):
        if not isinstance(other, Number):
            raise TypeError(f"Unable to divide Point2D with {type(other).__name__}")
        return Point2D(self.__x / other, self.__y / other)

    def __floordiv__(self, other):
        if not isinstance(other, Number):
            raise TypeError(f"Unable to do integer division on Point2D with {type(other).__name__}")
        return Point2D(self.__x // other, self.__y // other)

    def __str__(self):
        return f"({self.__x}, {self.__y})"


# Origin of the 2D plane; also used to implement __neg__.
Zero2D = Point2D(0, 0)


class Point3D(AbstractPoint):
    """Immutable three-dimensional point/vector with elementwise arithmetic
    and a cross product."""

    def __init__(self, x, y, z):
        super().__init__()
        self.__x = x
        self.__y = y
        self.__z = z

    @property
    def dimension(self):
        return 3

    @property
    def elements(self):
        """Coordinates as ``[x, y, z]``."""
        return [self.__x, self.__y, self.__z]

    @property
    def x(self):
        return self.__x

    @property
    def y(self):
        return self.__y

    @property
    def z(self):
        return self.__z

    def normalize(self):
        """Return this vector scaled to unit length.

        Raises:
            ZeroDivisionError: for the zero vector.
        """
        s = math.sqrt(self.__x**2 + self.__y**2 + self.__z**2)
        result = self/s
        return result

    def dot(self, other):
        """Scalar product with *other* (anything exposing ``x``/``y``/``z``)."""
        return self.x * other.x + self.y * other.y + self.z * other.z

    def cross(self, other):
        """Cross product ``self x other`` as a new Point3D."""
        x = self.y * other.z - self.z * other.y
        y = self.z * other.x - self.x * other.z
        z = self.x * other.y - self.y * other.x
        return Point3D(x, y, z)

    def __add__(self, other):
        if not isinstance(other, Point3D):
            raise TypeError(f"Unable to add {type(other).__name__} to Point3D")
        return Point3D(self.__x + other.__x, self.__y + other.__y, self.__z + other.__z)

    def __sub__(self, other):
        if not isinstance(other, Point3D):
            # message fixed: "subtract X from", matching Point2D
            raise TypeError(f"Unable to subtract {type(other).__name__} from Point3D")
        return Point3D(self.__x - other.__x, self.__y - other.__y, self.__z - other.__z)

    def __neg__(self):
        # implemented via subtraction from the origin constant below
        return Zero3D-self

    def __mul__(self, other):
        # scalar multiplication only; Point * Point is not defined
        if not isinstance(other, Number):
            raise TypeError(f"Unable to multiply {type(other).__name__} with Point3D")
        return Point3D(other * self.__x, other * self.__y, other * self.__z)

    __rmul__ = __mul__

    def __truediv__(self, other):
        if not isinstance(other, Number):
            raise TypeError(f"Unable to divide Point3D with {type(other).__name__}")
        return Point3D(self.__x / other, self.__y / other, self.__z / other)

    def __floordiv__(self, other):
        if not isinstance(other, Number):
            raise TypeError(f"Unable to do integer division on Point3D with {type(other).__name__}")
        return Point3D(self.__x // other, self.__y // other, self.__z // other)

    def __str__(self):
        return f"({self.__x}, {self.__y}, {self.__z})"


# Origin of 3D space; also used to implement __neg__.
Zero3D = Point3D(0, 0, 0)
| 29 | 101 | 0.585355 | 603 | 4,698 | 4.149254 | 0.116086 | 0.051958 | 0.055955 | 0.055955 | 0.798961 | 0.768585 | 0.733813 | 0.733813 | 0.701039 | 0.657074 | 0 | 0.014037 | 0.302469 | 4,698 | 161 | 102 | 29.180124 | 0.749466 | 0.010856 | 0 | 0.590164 | 0 | 0 | 0.135593 | 0.049063 | 0 | 0 | 0 | 0 | 0 | 1 | 0.270492 | false | 0.02459 | 0.02459 | 0.122951 | 0.565574 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 7 |
a0ac6c9fb8006b4b4b613b9af56b83a0d3c08cc3 | 158 | py | Python | echome/kube/exceptions.py | jasoncolburne/echome | a5ab87666ae859d1ca8e4902d5c441c0ce36547a | [
"MIT"
] | null | null | null | echome/kube/exceptions.py | jasoncolburne/echome | a5ab87666ae859d1ca8e4902d5c441c0ce36547a | [
"MIT"
] | null | null | null | echome/kube/exceptions.py | jasoncolburne/echome | a5ab87666ae859d1ca8e4902d5c441c0ce36547a | [
"MIT"
] | null | null | null | class ClusterDoesNotExist(Exception):
pass
class ClusterGetConfigurationError(Exception):
    """Raised when retrieving a cluster's configuration fails."""
class ClusterConfigurationError(Exception):
    """Raised when a cluster configuration is invalid."""
| 17.555556 | 46 | 0.797468 | 12 | 158 | 10.5 | 0.5 | 0.309524 | 0.285714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.14557 | 158 | 8 | 47 | 19.75 | 0.933333 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.5 | 0 | 0 | 0.5 | 0 | 1 | 0 | 1 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
a0bd2f2c65d8fe834cb15c21487c30f8e258cb53 | 13,839 | py | Python | krake/krake/client/core.py | rak-n-rok/Krake | 2f0d4a382b99639e2c1149ee8593a9bb589d2d3f | [
"Apache-2.0"
] | 1 | 2020-05-29T08:43:32.000Z | 2020-05-29T08:43:32.000Z | krake/krake/client/core.py | rak-n-rok/Krake | 2f0d4a382b99639e2c1149ee8593a9bb589d2d3f | [
"Apache-2.0"
] | null | null | null | krake/krake/client/core.py | rak-n-rok/Krake | 2f0d4a382b99639e2c1149ee8593a9bb589d2d3f | [
"Apache-2.0"
] | 1 | 2019-11-19T13:39:02.000Z | 2019-11-19T13:39:02.000Z | from krake.client import Watcher, ApiClient
from krake.data.core import (
GlobalMetricsProvider,
Role,
RoleList,
GlobalMetric,
GlobalMetricList,
RoleBinding,
RoleBindingList,
GlobalMetricsProviderList,
)
class CoreApi(ApiClient):
    """Core API client

    Example:
        .. code:: python

            from krake.client import Client

            with Client(url="http://localhost:8080") as client:
                core_api = CoreApi(client)

    Args:
        client (krake.client.Client): API client for accessing the Krake HTTP API

    """

    plurals = {
        "GlobalMetric": "GlobalMetrics",
        "GlobalMetricsProvider": "GlobalMetricsProviders",
        "Role": "Roles",
        "RoleBinding": "RoleBindings",
    }

    async def _request(self, method, path, body=None, allow_no_content=False):
        """Performs a single HTTP request against the API.

        Args:
            method (str): HTTP method, e.g. "GET" or "POST".
            path (str): URL path of the target resource.
            body (object, optional): resource serialized as the JSON body.
            allow_no_content (bool): if True, a "204 No Content" response is
                mapped to None instead of being decoded as JSON.

        Returns:
            The decoded JSON response body, or None for an allowed 204.

        """
        url = self.client.url.with_path(path)
        if body is None:
            resp = await self.client.session.request(method, url)
        else:
            resp = await self.client.session.request(method, url, json=body.serialize())
        if allow_no_content and resp.status == 204:
            return None
        return await resp.json()

    def _watcher(self, path, resource_cls, heartbeat=None):
        """Builds a watcher for the collection at the given path.

        Args:
            path (str): URL path of the watched collection.
            resource_cls (type): class used to deserialize watch events.
            heartbeat (int): Number of seconds after which the server sends a
                heartbeat in form of an empty newline. Passing 0 disables the
                heartbeat. Default: 10 seconds

        Returns:
            krake.client.Watcher: watcher yielding deserialized resources.

        """
        query = {"watch": ""}
        if heartbeat is not None:
            query["heartbeat"] = heartbeat
        url = self.client.url.with_path(path).with_query(query)
        return Watcher(self.client.session, url, resource_cls)

    async def create_global_metric(self, body):
        """Creates the specified GlobalMetric.

        Args:
            body (GlobalMetric): Body of the HTTP request.

        Returns:
            GlobalMetric: Body of the HTTP response.

        """
        data = await self._request("POST", "/core/globalmetrics", body=body)
        return GlobalMetric.deserialize(data)

    async def delete_global_metric(self, name):
        """Deletes the specified GlobalMetric.

        Args:
            name (str): name of the GlobalMetric.

        Returns:
            GlobalMetric: Body of the HTTP response, or None for an empty
            (204) response.

        """
        path = "/core/globalmetrics/{name}".format(name=name)
        data = await self._request("DELETE", path, allow_no_content=True)
        if data is None:
            return None
        return GlobalMetric.deserialize(data)

    async def list_global_metrics(self):
        """Lists the GlobalMetrics in the namespace.

        Returns:
            GlobalMetricList: Body of the HTTP response.

        """
        data = await self._request("GET", "/core/globalmetrics")
        return GlobalMetricList.deserialize(data)

    def watch_global_metrics(self, heartbeat=None):
        """Generates a watcher for the GlobalMetrics in the namespace.

        Args:
            heartbeat (int): Number of seconds after which the server sends a
                heartbeat in form of an empty newline. Passing 0 disables the
                heartbeat. Default: 10 seconds

        Returns:
            krake.client.Watcher: watcher yielding updated GlobalMetrics.

        """
        return self._watcher("/core/globalmetrics", GlobalMetric, heartbeat=heartbeat)

    async def read_global_metric(self, name):
        """Reads the specified GlobalMetric.

        Args:
            name (str): name of the GlobalMetric.

        Returns:
            GlobalMetric: Body of the HTTP response.

        """
        path = "/core/globalmetrics/{name}".format(name=name)
        data = await self._request("GET", path)
        return GlobalMetric.deserialize(data)

    async def update_global_metric(self, body, name):
        """Updates the specified GlobalMetric.

        Args:
            body (GlobalMetric): Body of the HTTP request.
            name (str): name of the GlobalMetric.

        Returns:
            GlobalMetric: Body of the HTTP response.

        """
        path = "/core/globalmetrics/{name}".format(name=name)
        data = await self._request("PUT", path, body=body)
        return GlobalMetric.deserialize(data)

    async def create_global_metrics_provider(self, body):
        """Creates the specified GlobalMetricsProvider.

        Args:
            body (GlobalMetricsProvider): Body of the HTTP request.

        Returns:
            GlobalMetricsProvider: Body of the HTTP response.

        """
        data = await self._request("POST", "/core/globalmetricsproviders", body=body)
        return GlobalMetricsProvider.deserialize(data)

    async def delete_global_metrics_provider(self, name):
        """Deletes the specified GlobalMetricsProvider.

        Args:
            name (str): name of the GlobalMetricsProvider.

        Returns:
            GlobalMetricsProvider: Body of the HTTP response, or None for an
            empty (204) response.

        """
        path = "/core/globalmetricsproviders/{name}".format(name=name)
        data = await self._request("DELETE", path, allow_no_content=True)
        if data is None:
            return None
        return GlobalMetricsProvider.deserialize(data)

    async def list_global_metrics_providers(self):
        """Lists the GlobalMetricsProviders in the namespace.

        Returns:
            GlobalMetricsProviderList: Body of the HTTP response.

        """
        data = await self._request("GET", "/core/globalmetricsproviders")
        return GlobalMetricsProviderList.deserialize(data)

    def watch_global_metrics_providers(self, heartbeat=None):
        """Generates a watcher for the GlobalMetricsProviders in the namespace.

        Args:
            heartbeat (int): Number of seconds after which the server sends a
                heartbeat in form of an empty newline. Passing 0 disables the
                heartbeat. Default: 10 seconds

        Returns:
            krake.client.Watcher: watcher yielding updated
            GlobalMetricsProviders.

        """
        return self._watcher(
            "/core/globalmetricsproviders", GlobalMetricsProvider, heartbeat=heartbeat
        )

    async def read_global_metrics_provider(self, name):
        """Reads the specified GlobalMetricsProvider.

        Args:
            name (str): name of the GlobalMetricsProvider.

        Returns:
            GlobalMetricsProvider: Body of the HTTP response.

        """
        path = "/core/globalmetricsproviders/{name}".format(name=name)
        data = await self._request("GET", path)
        return GlobalMetricsProvider.deserialize(data)

    async def update_global_metrics_provider(self, body, name):
        """Updates the specified GlobalMetricsProvider.

        Args:
            body (GlobalMetricsProvider): Body of the HTTP request.
            name (str): name of the GlobalMetricsProvider.

        Returns:
            GlobalMetricsProvider: Body of the HTTP response.

        """
        path = "/core/globalmetricsproviders/{name}".format(name=name)
        data = await self._request("PUT", path, body=body)
        return GlobalMetricsProvider.deserialize(data)

    async def create_role(self, body):
        """Creates the specified Role.

        Args:
            body (Role): Body of the HTTP request.

        Returns:
            Role: Body of the HTTP response.

        """
        data = await self._request("POST", "/core/roles", body=body)
        return Role.deserialize(data)

    async def delete_role(self, name):
        """Deletes the specified Role.

        Args:
            name (str): name of the Role.

        Returns:
            Role: Body of the HTTP response, or None for an empty (204)
            response.

        """
        path = "/core/roles/{name}".format(name=name)
        data = await self._request("DELETE", path, allow_no_content=True)
        if data is None:
            return None
        return Role.deserialize(data)

    async def list_roles(self):
        """Lists the Roles in the namespace.

        Returns:
            RoleList: Body of the HTTP response.

        """
        data = await self._request("GET", "/core/roles")
        return RoleList.deserialize(data)

    def watch_roles(self, heartbeat=None):
        """Generates a watcher for the Roles in the namespace.

        Args:
            heartbeat (int): Number of seconds after which the server sends a
                heartbeat in form of an empty newline. Passing 0 disables the
                heartbeat. Default: 10 seconds

        Returns:
            krake.client.Watcher: watcher yielding updated Roles.

        """
        return self._watcher("/core/roles", Role, heartbeat=heartbeat)

    async def read_role(self, name):
        """Reads the specified Role.

        Args:
            name (str): name of the Role.

        Returns:
            Role: Body of the HTTP response.

        """
        path = "/core/roles/{name}".format(name=name)
        data = await self._request("GET", path)
        return Role.deserialize(data)

    async def update_role(self, body, name):
        """Updates the specified Role.

        Args:
            body (Role): Body of the HTTP request.
            name (str): name of the Role.

        Returns:
            Role: Body of the HTTP response.

        """
        path = "/core/roles/{name}".format(name=name)
        data = await self._request("PUT", path, body=body)
        return Role.deserialize(data)

    async def create_role_binding(self, body):
        """Creates the specified RoleBinding.

        Args:
            body (RoleBinding): Body of the HTTP request.

        Returns:
            RoleBinding: Body of the HTTP response.

        """
        data = await self._request("POST", "/core/rolebindings", body=body)
        return RoleBinding.deserialize(data)

    async def delete_role_binding(self, name):
        """Deletes the specified RoleBinding.

        Args:
            name (str): name of the RoleBinding.

        Returns:
            RoleBinding: Body of the HTTP response, or None for an empty
            (204) response.

        """
        path = "/core/rolebindings/{name}".format(name=name)
        data = await self._request("DELETE", path, allow_no_content=True)
        if data is None:
            return None
        return RoleBinding.deserialize(data)

    async def list_role_bindings(self):
        """Lists the RoleBindings in the namespace.

        Returns:
            RoleBindingList: Body of the HTTP response.

        """
        data = await self._request("GET", "/core/rolebindings")
        return RoleBindingList.deserialize(data)

    def watch_role_bindings(self, heartbeat=None):
        """Generates a watcher for the RoleBindings in the namespace.

        Args:
            heartbeat (int): Number of seconds after which the server sends a
                heartbeat in form of an empty newline. Passing 0 disables the
                heartbeat. Default: 10 seconds

        Returns:
            krake.client.Watcher: watcher yielding updated RoleBindings.

        """
        return self._watcher("/core/rolebindings", RoleBinding, heartbeat=heartbeat)

    async def read_role_binding(self, name):
        """Reads the specified RoleBinding.

        Args:
            name (str): name of the RoleBinding.

        Returns:
            RoleBinding: Body of the HTTP response.

        """
        path = "/core/rolebindings/{name}".format(name=name)
        data = await self._request("GET", path)
        return RoleBinding.deserialize(data)

    async def update_role_binding(self, body, name):
        """Updates the specified RoleBinding.

        Args:
            body (RoleBinding): Body of the HTTP request.
            name (str): name of the RoleBinding.

        Returns:
            RoleBinding: Body of the HTTP response.

        """
        path = "/core/rolebindings/{name}".format(name=name)
        data = await self._request("PUT", path, body=body)
        return RoleBinding.deserialize(data)
| 29.889849 | 87 | 0.604596 | 1,527 | 13,839 | 5.428946 | 0.069417 | 0.057901 | 0.034741 | 0.050181 | 0.896502 | 0.844632 | 0.813752 | 0.80965 | 0.79035 | 0.79035 | 0 | 0.002869 | 0.294891 | 13,839 | 462 | 88 | 29.954545 | 0.84669 | 0.107739 | 0 | 0.712644 | 0 | 0 | 0.093327 | 0.046543 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022989 | false | 0 | 0.011494 | 0 | 0.206897 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
26111634c4a547acc377eefb0d5c83bb9d44f248 | 327 | py | Python | intralinks_test/folders_v2_test.py | ilapi/intralinks-sdk-python | dcf789d874d907833aa48557309c3e12e4703a4e | [
"MIT"
] | 3 | 2019-02-28T14:46:54.000Z | 2021-01-07T22:41:35.000Z | intralinks_test/folders_v2_test.py | ilapi/intralinks-sdk-python | dcf789d874d907833aa48557309c3e12e4703a4e | [
"MIT"
] | 5 | 2018-12-12T10:08:18.000Z | 2018-12-17T13:45:56.000Z | intralinks_test/folders_v2_test.py | ilapi/intralinks-sdk-python | dcf789d874d907833aa48557309c3e12e4703a4e | [
"MIT"
] | 1 | 2019-02-28T14:46:35.000Z | 2019-02-28T14:46:35.000Z | import intralinks_test.folders_helper
def test_create_update_delete_folder(v2_client, test_data):
    # Thin wrapper: run the shared folders-helper scenario of the same name.
    scenario = intralinks_test.folders_helper.test_create_update_delete_folder
    scenario(v2_client, test_data)
def test_create_delete_folders(v2_client, test_data):
    """Run the shared create/delete folders scenario against the v2 client."""
    helper = intralinks_test.folders_helper
    helper.test_create_delete_folders(v2_client, test_data)
2673bd5b7a536bd80181e510f1df11cb4e3848ee | 3,066 | py | Python | meilisearch/_httprequests.py | curquiza/meilisearch-python | e309e0a34ce0dd5aa9c3fc1963c3e83993f5f74d | [
"MIT"
] | null | null | null | meilisearch/_httprequests.py | curquiza/meilisearch-python | e309e0a34ce0dd5aa9c3fc1963c3e83993f5f74d | [
"MIT"
] | null | null | null | meilisearch/_httprequests.py | curquiza/meilisearch-python | e309e0a34ce0dd5aa9c3fc1963c3e83993f5f74d | [
"MIT"
] | null | null | null | import requests
class HttpRequests:
    """Static helpers issuing authenticated HTTP requests to a MeiliSearch host.

    Every verb builds the same URL (``config.url + '/' + path``) and the same
    headers (``x-meili-api-key`` plus a JSON content type), raises for non-2xx
    statuses via ``raise_for_status``, and wraps HTTP/connection failures in a
    plain ``Exception`` — exactly as the previous per-verb copies did, now via
    one shared implementation instead of five duplicates.
    """

    @staticmethod
    def _request(method, config, path, body=None, send_body=True):
        """Shared implementation for all verbs.

        Args:
            method (str): HTTP verb ('GET', 'POST', ...).
            config: Object providing ``url`` and ``apikey`` attributes.
            path (str): Path appended to ``config.url`` after a '/'.
            body: JSON-serializable payload; ``None`` becomes ``{}``.
            send_body (bool): False only for GET, which historically sent
                no JSON payload at all.

        Returns:
            requests.Response: The successful response object.

        Raises:
            Exception: Wrapping the underlying HTTPError/ConnectionError,
                preserving the original error-reporting contract.
        """
        kwargs = {
            'headers': {
                'x-meili-api-key': config.apikey,
                'content-type': 'application/json'
            }
        }
        if send_body:
            kwargs['json'] = {} if body is None else body
        try:
            r = requests.request(method, config.url + '/' + path, **kwargs)
            r.raise_for_status()
            return r
        except (requests.exceptions.HTTPError,
                requests.exceptions.ConnectionError) as err:
            # Both failure modes were handled identically before; keep the
            # same opaque Exception wrapper so callers' handlers still match.
            raise Exception(err)

    @staticmethod
    def get(config, path):
        """GET *path*; returns the requests.Response."""
        return HttpRequests._request('GET', config, path, send_body=False)

    @staticmethod
    def post(config, path, body=None):
        """POST *body* (default ``{}``) as JSON to *path*."""
        return HttpRequests._request('POST', config, path, body)

    @staticmethod
    def put(config, path, body=None):
        """PUT *body* (default ``{}``) as JSON to *path*."""
        return HttpRequests._request('PUT', config, path, body)

    @staticmethod
    def patch(config, path, body=None):
        """PATCH *body* (default ``{}``) as JSON to *path*."""
        return HttpRequests._request('PATCH', config, path, body)

    @staticmethod
    def delete(config, path, body=None):
        """DELETE *path* with *body* (default ``{}``) as JSON payload."""
        return HttpRequests._request('DELETE', config, path, body)
| 30.356436 | 58 | 0.468363 | 275 | 3,066 | 5.185455 | 0.149091 | 0.098177 | 0.168303 | 0.13324 | 0.922861 | 0.922861 | 0.922861 | 0.922861 | 0.922861 | 0.922861 | 0 | 0 | 0.442596 | 3,066 | 100 | 59 | 30.66 | 0.834406 | 0 | 0 | 0.765957 | 0 | 0 | 0.071755 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.053191 | false | 0 | 0.010638 | 0 | 0.12766 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
cd0ea3c223e2c39b9049e3e32862bbb263229e10 | 96 | py | Python | mhkit/loads/__init__.py | akeeste/MHKiT-Python | db25af603547b764b2f85e957a5b0621cea5fd10 | [
"BSD-3-Clause"
] | 3 | 2020-01-15T16:21:26.000Z | 2020-01-28T17:10:13.000Z | mhkit/loads/__init__.py | akeeste/MHKiT-Python | db25af603547b764b2f85e957a5b0621cea5fd10 | [
"BSD-3-Clause"
] | null | null | null | mhkit/loads/__init__.py | akeeste/MHKiT-Python | db25af603547b764b2f85e957a5b0621cea5fd10 | [
"BSD-3-Clause"
] | 4 | 2020-01-15T16:24:04.000Z | 2020-01-15T20:45:22.000Z | from mhkit.loads import general
from mhkit.loads import graphics
from mhkit.loads import extreme | 32 | 32 | 0.854167 | 15 | 96 | 5.466667 | 0.466667 | 0.329268 | 0.512195 | 0.731707 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114583 | 96 | 3 | 33 | 32 | 0.964706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
cd4971aba0e6469b17ddf14ae0c7db806b77598e | 11,496 | py | Python | ksteta3pi/Consideredbkg/MC_12_11104124_MagDown.py | Williams224/davinci-scripts | 730642d2ff13543eca4073a4ce0932631195de56 | [
"MIT"
] | null | null | null | ksteta3pi/Consideredbkg/MC_12_11104124_MagDown.py | Williams224/davinci-scripts | 730642d2ff13543eca4073a4ce0932631195de56 | [
"MIT"
] | null | null | null | ksteta3pi/Consideredbkg/MC_12_11104124_MagDown.py | Williams224/davinci-scripts | 730642d2ff13543eca4073a4ce0932631195de56 | [
"MIT"
] | null | null | null | #-- GAUDI jobOptions generated on Fri Jul 17 16:39:13 2015
#-- Contains event types :
#-- 11104124 - 98 files - 1034759 events - 222.31 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass Step-124620
#-- StepId : 124620
#-- StepName : Digi13 with G4 dE/dx
#-- ApplicationName : Boole
#-- ApplicationVersion : v26r3
#-- OptionFiles : $APPCONFIGOPTS/Boole/Default.py;$APPCONFIGOPTS/Boole/DataType-2012.py;$APPCONFIGOPTS/Boole/Boole-SiG4EnergyDeposit.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-124630
#-- StepId : 124630
#-- StepName : Stripping20-NoPrescalingFlagged for Sim08
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v32r2p1
#-- OptionFiles : $APPCONFIGOPTS/DaVinci/DV-Stripping20-Stripping-MC-NoPrescaling.py;$APPCONFIGOPTS/DaVinci/DataType-2012.py;$APPCONFIGOPTS/DaVinci/InputType-DST.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-125877
#-- StepId : 125877
#-- StepName : L0 emulation - TCK 003d
#-- ApplicationName : Moore
#-- ApplicationVersion : v20r4
#-- OptionFiles : $APPCONFIGOPTS/L0App/L0AppSimProduction.py;$APPCONFIGOPTS/L0App/L0AppTCK-0x003d.py;$APPCONFIGOPTS/L0App/DataType-2012.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r200
#-- Visible : N
#-- Processing Pass Step-127200
#-- StepId : 127200
#-- StepName : TCK-0x4097003d Flagged for Sim08 2012
#-- ApplicationName : Moore
#-- ApplicationVersion : v14r2p1
#-- OptionFiles : $APPCONFIGOPTS/Moore/MooreSimProductionForSeparateL0AppStep.py;$APPCONFIGOPTS/Conditions/TCK-0x4097003d.py;$APPCONFIGOPTS/Moore/DataType-2012.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r206
#-- Visible : Y
#-- Processing Pass Step-127147
#-- StepId : 127147
#-- StepName : Sim08g - 2012 - MD - Pythia8
#-- ApplicationName : Gauss
#-- ApplicationVersion : v45r9
#-- OptionFiles : $APPCONFIGOPTS/Gauss/Sim08-Beam4000GeV-md100-2012-nu2.5.py;$DECFILESROOT/options/@{eventType}.py;$LBPYTHIA8ROOT/options/Pythia8.py;$APPCONFIGOPTS/Gauss/G4PL_FTFP_BERT_EmNoCuts.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : dddb-20130929-1
#-- CONDDB : sim-20130522-1-vc-md100
#-- ExtraPackages : AppConfig.v3r205;DecFiles.v27r37
#-- Visible : Y
#-- Processing Pass Step-124834
#-- StepId : 124834
#-- StepName : Reco14a for MC
#-- ApplicationName : Brunel
#-- ApplicationVersion : v43r2p7
#-- OptionFiles : $APPCONFIGOPTS/Brunel/DataType-2012.py;$APPCONFIGOPTS/Brunel/MC-WithTruth.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
from Gaudi.Configuration import *
from GaudiConf import IOHelper
# All 98 input LFNs share a single template; only the zero-padded 8-digit
# sequence number varies.  Numbers 17-28 are absent from the production
# output, hence the two ranges (1-16 and 29-110).
_LFN_TEMPLATE = ('LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043575/0000/'
                 '00043575_{0:08d}_1.allstreams.dst')
IOHelper('ROOT').inputFiles(
    [_LFN_TEMPLATE.format(n)
     for n in list(range(1, 17)) + list(range(29, 111))],
    clear=True)
| 61.475936 | 247 | 0.795755 | 1,594 | 11,496 | 5.614178 | 0.148683 | 0.284725 | 0.098559 | 0.142362 | 0.72902 | 0.723209 | 0.723209 | 0.723209 | 0.717846 | 0.695273 | 0 | 0.320362 | 0.047756 | 11,496 | 186 | 248 | 61.806452 | 0.497123 | 0.252871 | 0 | 0 | 1 | 0.970297 | 0.931722 | 0.931253 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.019802 | 0 | 0.019802 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 13 |
cd567ccd93febc2235157a27b451018ec6921d1f | 20,314 | py | Python | layers/functions/detection.py | Feywell/association_lstm_implement | 4e439bd934dc865aad0015a897980a8f124602af | [
"MIT"
] | 1 | 2020-07-16T00:47:03.000Z | 2020-07-16T00:47:03.000Z | layers/functions/detection.py | Feywell/association_lstm_implement | 4e439bd934dc865aad0015a897980a8f124602af | [
"MIT"
] | null | null | null | layers/functions/detection.py | Feywell/association_lstm_implement | 4e439bd934dc865aad0015a897980a8f124602af | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import torch
from torch.autograd import Function
from ..box_utils import match, decode, nms
from data import voc as cfg
import numpy as np
import torch.nn as nn
class Detect(Function):
    """At test time, Detect is the final layer of SSD. Decode location preds,
    apply non-maximum suppression to location predictions based on conf
    scores and threshold to a top_k number of output predictions for both
    confidence score and locations.
    """
    def __init__(self, num_classes, bkg_label, top_k, conf_thresh, nms_thresh):
        # num_classes: total class count including background (class 0 is
        # skipped in forward()).
        self.num_classes = num_classes
        # Stored but not read anywhere in this class.
        self.background_label = bkg_label
        # Maximum detections kept per class by nms().
        self.top_k = top_k
        # Parameters used in nms.
        self.nms_thresh = nms_thresh
        if nms_thresh <= 0:
            raise ValueError('nms_threshold must be non negative.')
        # Minimum score for a prior to be considered at all.
        self.conf_thresh = conf_thresh
        self.variance = cfg['variance']

    def forward(self, loc_data, conf_data, prior_data):
        """
        Args:
            loc_data: (tensor) Loc preds from loc layers
                Shape: [batch,num_priors*4]
            conf_data: (tensor) Shape: Conf preds from conf layers
                Shape: [batch*num_priors,num_classes]
            prior_data: (tensor) Prior boxes and variances from priorbox layers
                Shape: [1,num_priors,4]

        Returns:
            (tensor) Detections of shape [batch, num_classes, top_k, 5];
            each kept row is [score, x1, y1, x2, y2], unfilled rows are zero.
        """
        num = loc_data.size(0)  # batch size
        num_priors = prior_data.size(0)
        output = torch.zeros(num, self.num_classes, self.top_k, 5)
        # View confidences as [batch, num_classes, num_priors] for per-class slicing.
        conf_preds = conf_data.view(num, num_priors,
                                    self.num_classes).transpose(2, 1)

        # Decode predictions into bboxes.
        for i in range(num):
            decoded_boxes = decode(loc_data[i], prior_data, self.variance)
            # For each class, perform nms
            conf_scores = conf_preds[i].clone()

            for cl in range(1, self.num_classes):
                # Keep only priors whose score for this class clears conf_thresh.
                c_mask = conf_scores[cl].gt(self.conf_thresh)
                scores = conf_scores[cl][c_mask]
                if scores.dim() == 0 or scores.size() == torch.Size([0]):
                    continue
                l_mask = c_mask.unsqueeze(1).expand_as(decoded_boxes)
                boxes = decoded_boxes[l_mask].view(-1, 4)
                # idx of highest scoring and non-overlapping boxes per class
                ids, count = nms(boxes, scores, self.nms_thresh, self.top_k)
                output[i, cl, :count] = \
                    torch.cat((scores[ids[:count]].unsqueeze(1),
                               boxes[ids[:count]]), 1)
        # Global rank of every detection by score, across all classes.
        flt = output.contiguous().view(num, -1, 5)
        _, idx = flt[:, :, 0].sort(1, descending=True)
        _, rank = idx.sort(1)
        # NOTE(review): boolean-mask advanced indexing in PyTorch returns a
        # copy, so this fill_ does not write back into `output` — the line
        # appears to be a no-op inherited from upstream ssd.pytorch; confirm
        # before relying on any truncation happening here.
        flt[(rank < self.top_k).unsqueeze(-1).expand_as(flt)].fill_(0)
        return output
class Trnsform_target(nn.Module):
    """Decode, filter and NMS predictions while also attaching matched targets.

    conf_data must NOT have softmax applied before calling this module.
    """
    def __init__(self, num_classes, top_k, overlap_thresh,conf_thresh, nms_thresh, use_gpu=True):
        super(Trnsform_target, self).__init__()
        self.num_classes = num_classes
        # Maximum number of rows kept per image in the output.
        self.top_k = top_k
        # IoU threshold used by match() to assign priors to ground truth.
        self.threshold = overlap_thresh
        # Parameters used in nms.
        self.nms_thresh = nms_thresh
        if nms_thresh <= 0:
            raise ValueError('nms_threshold must be non negative.')
        self.conf_thresh = conf_thresh
        self.variance = cfg['variance']
        self.use_gpu = use_gpu

    def forward(self, loc_data, conf_data, prior_data, targets):
        """
        Args:
            loc_data: (tensor) Loc preds from loc layers
                Shape: [batch,num_priors*4]
            conf_data: (tensor) Shape: Conf preds from conf layers
                Shape: [batch*num_priors,num_classes]
            prior_data: (tensor) Prior boxes and variances from priorbox layers
                Shape: [1,num_priors,4]
            targets: per-image ground truth, last column is the class label.
        return:
            rois:(tensor) rois after decoded loc data and nms
                Shape: [batch, top_k, 5]
            loc_pred: (tensor) loc after nms
                Shape: [batch, top_k, 4]
            cls_pred: (tensor) conf_data after nms
                Shape: [batch, top_k, num_classes]
            loc_truth, conf_truth: matched regression/label targets for the
                same kept rows.
        """
        priors = prior_data
        batch = loc_data.size(0)  # batch size
        priors = priors[:loc_data.size(1), :]
        num_priors = (priors.size(0))
        num_classes = self.num_classes
        # match priors (default boxes) and ground truth boxes
        # NOTE(review): conf_t is allocated as a float Tensor although it
        # receives class labels from match() — confirm the expected dtype.
        loc_t = torch.Tensor(batch, num_priors, 4)
        conf_t = torch.Tensor(batch, num_priors)
        for idx in range(batch):
            truths = targets[idx][:, :-1].data
            labels = targets[idx][:, -1].data
            defaults = priors.data
            match(self.threshold, truths, defaults, self.variance, labels,
                  loc_t, conf_t, idx)  # fills loc_t and conf_t in place
        print('conf_t size: ',conf_t.size())
        print('conf type: ',conf_t.dtype)
        print('conf_t values: \n',conf_t)
        conf_t = conf_t.reshape(batch, num_priors,1)
        # Row layout: score(1) + decoded box(4) + raw loc(4) + conf(num_classes)
        # + matched loc target(4) + matched label(1).
        result = torch.zeros(batch, self.top_k, 1 + num_classes + 3* 4 + 1)
        conf_preds = conf_data.view(batch, num_priors,
                                    num_classes).transpose(2, 1)  # [batch, num_classes, num_priors]
        conf_data = conf_data.view(batch, num_priors,
                                   num_classes)
        decoded_box = loc_data.new(loc_data.size(0), loc_data.size(1), loc_data.size(2)).zero_()
        for i in range(batch):  # process each batch element separately
            decoded_boxes = decode(loc_data[i], prior_data, self.variance)  # decode boxes
            # For each class, perform nms
            conf_scores = conf_preds[i].clone()  # this image's per-class scores
            loc_keep = loc_data[i].clone()
            conf_keep = conf_data[i].clone()
            loc_t_keep = loc_t[i].clone()
            conf_t_keep = conf_t[i].clone()
            decoded_box[i] = decoded_boxes
            output = []
            for cl in range(1, num_classes):  # process each class separately
                c_mask = conf_scores[cl].gt(self.conf_thresh)  # keep scores above conf_thresh
                scores = conf_scores[cl][c_mask]
                if scores.dim() == 0 or scores.size() == torch.Size([0]) :
                    print('scores dim: ',scores.dim(),scores.size())
                    continue
                # Expand the prior mask to each per-prior row being gathered.
                l_mask = c_mask.unsqueeze(1).expand_as(decoded_boxes)
                s_mask = c_mask.unsqueeze(1).expand_as(conf_keep)
                t_mask = c_mask.unsqueeze(1).expand_as(conf_t_keep)
                boxes = decoded_boxes[l_mask].view(-1, 4)
                loc = loc_keep[l_mask].view(-1, 4)
                loc_t_res = loc_t_keep[l_mask].view(-1,4)
                conf = conf_keep[s_mask].view(-1, num_classes)
                conf_t_res = conf_t_keep[t_mask].view(-1,1)
                # idx of highest scoring and non-overlapping boxes per class
                ids, count = nms(boxes, scores, self.nms_thresh, self.top_k)
                output.append(
                    torch.cat((scores[ids[:count]].unsqueeze(1),
                               boxes[ids[:count]], loc[ids[:count]], conf[ids[:count]], loc_t_res[ids[:count]],
                               conf_t_res[ids[:count]]), 1)
                )
            # NOTE(review): raises IndexError if no class clears conf_thresh
            # (output stays empty) — confirm whether that can occur in training.
            res = output[0]
            for j in range(len(output) - 1):
                res = torch.cat((res, output[j + 1]), dim=0)
            # Remember scores, then overwrite column 0 with the batch index
            # (ROI-pooling convention for the rois tensor).
            sort_conf = res[:, 0].clone()
            res[:, 0] = i
            # Remove duplicate rows using a numpy byte-view uniqueness trick.
            res = res.cpu().detach().numpy()
            b = np.ascontiguousarray(res).view(np.dtype((np.void, res.dtype.itemsize * res.shape[1])))
            _, idx = np.unique(b, return_index=True)
            keep_res = torch.from_numpy(res[idx])
            sort_val = sort_conf[idx].view(-1, 1)
            _, indices = sort_val[:, 0].sort(0, descending=True)
            # Keep at most top_k rows, highest confidence first.
            res_sel = keep_res[indices][:self.top_k]
            result[i][:res_sel.size(0)] = res_sel
        # Column-slice the packed result back into its components.
        index1 = torch.tensor(range(0, 5))
        index2 = torch.tensor(range(5, 9))
        index3 = torch.tensor(range(9, 9 + num_classes))
        index4 = torch.tensor(range(9 + num_classes, 9+num_classes+4))
        index5 = torch.tensor(range(9 + num_classes + 4, 9+num_classes+4+1))
        # rois: first 5 columns [batch_idx, x1, y1, x2, y2].
        rois = torch.index_select(result, -1, index1)
        # loc: the 4 raw localization columns.
        loc = torch.index_select(result, -1, index2)
        # cls: the num_classes confidence columns.
        cls = torch.index_select(result, -1, index3)
        loc_truth = torch.index_select(result, -1, index4)
        conf_truth = torch.index_select(result, -1, index5)
        print('conf_truth: ',conf_truth)
        return rois, loc, cls, loc_truth, conf_truth
class test_target(Function):
    """Inference-time variant of Trnsform_target: decode, filter, NMS and
    repack predictions without attaching ground-truth targets.
    """
    def __init__(self, num_classes, top_k, overlap_thresh, conf_thresh, nms_thresh, use_gpu=True):
        self.num_classes = num_classes
        # Maximum number of rows kept per image in the output.
        self.top_k = top_k
        # Stored for parity with the training variant; unused in forward().
        self.threshold = overlap_thresh
        # Parameters used in nms.
        self.nms_thresh = nms_thresh
        if nms_thresh <= 0:
            raise ValueError('nms_threshold must be non negative.')
        self.conf_thresh = conf_thresh
        self.variance = cfg['variance']
        self.use_gpu = use_gpu

    def forward(self, loc_data, conf_data, prior_data):
        """
        Args:
            loc_data: (tensor) Loc preds from loc layers
                Shape: [batch,num_priors*4]
            conf_data: (tensor) Shape: Conf preds from conf layers
                Shape: [batch*num_priors,num_classes]
            prior_data: (tensor) Prior boxes and variances from priorbox layers
                Shape: [1,num_priors,4]
        return:
            rois:(tensor) rois after decoded loc data and nms
                Shape: [batch, top_k, 5]
            loc_pred: (tensor) loc after nms
                Shape: [batch, top_k, 4]
            cls_pred: (tensor) conf_data after nms
                Shape: [batch, top_k, num_classes]
        """
        priors = prior_data
        batch = loc_data.size(0)  # batch size
        print('priors size: ',priors.size())
        priors = priors[:loc_data.size(1), :]
        print('after priors size: ',priors.size() )
        num_priors = (priors.size(0))
        num_classes = self.num_classes
        # Row layout: score(1) + decoded box(4) + raw loc(4) + conf(num_classes).
        result = torch.zeros(batch, self.top_k, 1 + 2 * 4 +num_classes)
        conf_preds = conf_data.view(batch, num_priors,
                                    num_classes).transpose(2, 1)  # [batch, num_classes, num_priors]
        conf_data = conf_data.view(batch, num_priors,
                                   num_classes)
        decoded_box = loc_data.new(loc_data.size(0), loc_data.size(1), loc_data.size(2)).zero_()
        for i in range(batch):  # process each batch element separately
            decoded_boxes = decode(loc_data[i], prior_data, self.variance)  # decode boxes
            # For each class, perform nms
            conf_scores = conf_preds[i].clone()  # this image's per-class scores
            loc_keep = loc_data[i].clone()
            conf_keep = conf_data[i].clone()
            # decoded_box is filled but never read after the loop.
            decoded_box[i] = decoded_boxes
            output = []
            for cl in range(1, num_classes):  # process each class separately
                c_mask = conf_scores[cl].gt(self.conf_thresh)  # keep scores above conf_thresh
                scores = conf_scores[cl][c_mask]
                if scores.dim() == 0 or scores.size() == torch.Size([0]):
                    continue
                l_mask = c_mask.unsqueeze(1).expand_as(decoded_boxes)
                s_mask = c_mask.unsqueeze(1).expand_as(conf_keep)
                boxes = decoded_boxes[l_mask].view(-1, 4)
                loc = loc_keep[l_mask].view(-1, 4)
                conf = conf_keep[s_mask].view(-1, num_classes)
                # idx of highest scoring and non-overlapping boxes per class
                ids, count = nms(boxes, scores, self.nms_thresh, self.top_k)
                output.append(
                    torch.cat((scores[ids[:count]].unsqueeze(1),
                               boxes[ids[:count]], loc[ids[:count]], conf[ids[:count]] ), 1)
                )
            # NOTE(review): raises IndexError if no class clears conf_thresh
            # (output stays empty) — confirm this cannot happen at inference.
            res = output[0]
            for j in range(len(output) - 1):
                res = torch.cat((res, output[j + 1]), dim=0)
            # Remember scores, then overwrite column 0 with the batch index
            # (ROI-pooling convention for the rois tensor).
            sort_conf = res[:, 0].clone()
            res[:, 0] = i
            # Remove duplicate rows using a numpy byte-view uniqueness trick.
            res = res.cpu().detach().numpy()
            b = np.ascontiguousarray(res).view(np.dtype((np.void, res.dtype.itemsize * res.shape[1])))
            _, idx = np.unique(b, return_index=True)
            keep_res = torch.from_numpy(res[idx])
            sort_val = sort_conf[idx].view(-1, 1)
            _, indices = sort_val[:, 0].sort(0, descending=True)
            # Keep at most top_k rows, highest confidence first.
            res_sel = keep_res[indices][:self.top_k]
            result[i][:res_sel.size(0)] = res_sel
        # Column-slice the packed result back into its components.
        index1 = torch.tensor(range(0, 5))
        index2 = torch.tensor(range(5, 9))
        index3 = torch.tensor(range(9, 9 + num_classes))
        # rois: first 5 columns [batch_idx, x1, y1, x2, y2].
        rois = torch.index_select(result, -1, index1)
        # loc: the 4 raw localization columns.
        loc = torch.index_select(result, -1, index2)
        # cls: the num_classes confidence columns.
        cls = torch.index_select(result, -1, index3)
        return rois, loc, cls
class train_target(Function):
    """Decode SSD predictions into per-image ROIs for second-stage training.

    For each image in the batch: decodes loc predictions against the prior
    boxes, keeps detections whose per-class confidence exceeds ``conf_thresh``,
    applies per-class NMS, removes duplicate rows, sorts by confidence and
    keeps at most ``top_k`` rows per image.
    """

    def __init__(self, num_classes, top_k, overlap_thresh, conf_thresh, nms_thresh, use_gpu=True):
        # num_classes: number of classes (background assumed at index 0).
        self.num_classes = num_classes
        # top_k: maximum number of boxes kept per image.
        self.top_k = top_k
        self.threshold = overlap_thresh
        # Parameters used in nms.
        if nms_thresh <= 0:
            # Message fixed: the check rejects zero as well as negatives.
            raise ValueError('nms_threshold must be positive.')
        self.nms_thresh = nms_thresh
        self.conf_thresh = conf_thresh
        self.variance = cfg['variance']
        self.use_gpu = use_gpu

    def forward(self, loc_data, conf_data, prior_data):
        """
        Args:
            loc_data: (tensor) Loc preds from loc layers
                Shape: [batch, num_priors, 4]
            conf_data: (tensor) Conf preds from conf layers
                Shape: [batch*num_priors, num_classes]
            prior_data: (tensor) Prior boxes and variances from priorbox layers
                Shape: [num_priors, 4]
        Returns:
            rois: (tensor) [batch_index, x1, y1, x2, y2] rows after decode + NMS
                Shape: [batch, top_k, 5]
            loc: (tensor) raw loc predictions of the kept boxes
                Shape: [batch, top_k, 4]
            cls: (tensor) confidence scores of the kept boxes
                Shape: [batch, top_k, num_classes]
            priors_out: (tensor) prior boxes of the kept boxes
                Shape: [batch, top_k, 4]
        """
        batch = loc_data.size(0)
        # Keep only as many priors as there are loc predictions, and use this
        # truncated set consistently below (the original decoded against the
        # untruncated prior_data).
        priors = prior_data[:loc_data.size(1), :]
        num_priors = priors.size(0)
        num_classes = self.num_classes
        # Packed row layout: score[1] + roi[4] + loc[4] + conf[num_classes] + prior[4].
        result = torch.zeros(batch, self.top_k, 1 + 3 * 4 + num_classes)
        # [batch, num_classes, num_priors] view for per-class thresholding.
        conf_preds = conf_data.view(batch, num_priors, num_classes).transpose(2, 1)
        conf_data = conf_data.view(batch, num_priors, num_classes)
        for i in range(batch):  # process each image independently
            decoded_boxes = decode(loc_data[i], priors, self.variance)  # box decode
            conf_scores = conf_preds[i].clone()
            loc_keep = loc_data[i].clone()
            conf_keep = conf_data[i].clone()
            # BUG FIX: the original used priors[i].clone() — indexing the
            # [num_priors, 4] prior tensor by *batch* index yields a single
            # 4-vector, which cannot be masked by l_mask below. The full
            # prior set is needed for every image.
            priors_keep = priors.clone()
            output = []
            for cl in range(1, num_classes):  # skip background class 0
                c_mask = conf_scores[cl].gt(self.conf_thresh)
                scores = conf_scores[cl][c_mask]
                if scores.numel() == 0:
                    continue
                l_mask = c_mask.unsqueeze(1).expand_as(decoded_boxes)
                s_mask = c_mask.unsqueeze(1).expand_as(conf_keep)
                boxes = decoded_boxes[l_mask].view(-1, 4)
                loc = loc_keep[l_mask].view(-1, 4)
                conf = conf_keep[s_mask].view(-1, num_classes)
                prior_select = priors_keep[l_mask].view(-1, 4)
                # idx of highest scoring and non-overlapping boxes per class
                ids, count = nms(boxes, scores, self.nms_thresh, self.top_k)
                kept = ids[:count]
                output.append(torch.cat((scores[kept].unsqueeze(1),
                                         boxes[kept], loc[kept],
                                         conf[kept], prior_select[kept]), 1))
            if not output:
                # ROBUSTNESS: no class passed conf_thresh for this image; the
                # original crashed on output[0]. Leave result[i] all zeros.
                continue
            res = torch.cat(output, dim=0)
            # Remember scores before overwriting column 0 with the batch index
            # (ROI format: first column identifies the image in the batch).
            sort_conf = res[:, 0].clone()
            res[:, 0] = i
            # Remove duplicate boxes using numpy: view each row as a single
            # void scalar so np.unique deduplicates whole rows.
            res = res.cpu().detach().numpy()
            b = np.ascontiguousarray(res).view(
                np.dtype((np.void, res.dtype.itemsize * res.shape[1])))
            _, idx = np.unique(b, return_index=True)
            keep_res = torch.from_numpy(res[idx])
            sort_val = sort_conf[idx].view(-1, 1)
            _, indices = sort_val[:, 0].sort(0, descending=True)
            # Keep at most top_k boxes, highest confidence first.
            res_sel = keep_res[indices][:self.top_k]
            result[i][:res_sel.size(0)] = res_sel
        # Slice the packed result into its four components along the last dim.
        index1 = torch.tensor(range(0, 5))                               # rois: first 5 columns
        index2 = torch.tensor(range(5, 9))                               # loc: next 4 columns
        index3 = torch.tensor(range(9, 9 + num_classes))                 # cls: num_classes columns
        index4 = torch.tensor(range(9 + num_classes, 9 + num_classes + 4))  # priors: last 4 columns
        rois = torch.index_select(result, -1, index1)
        loc = torch.index_select(result, -1, index2)
        cls = torch.index_select(result, -1, index3)
        priors_out = torch.index_select(result, -1, index4)
        return rois, loc, cls, priors_out
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.