hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
fce4abd58335a0b28d0cc68f213d513a81cecdba
| 34
|
py
|
Python
|
test/tokenize/t26.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
test/tokenize/t26.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
test/tokenize/t26.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
def d22(a, b, c=2, d=2, *k): pass
| 17
| 33
| 0.5
| 10
| 34
| 1.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 0.205882
| 34
| 1
| 34
| 34
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| false
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
1e138c7fc4d85545d1a14f9a08944b258e4f0bee
| 138
|
py
|
Python
|
others/edge/speech_recognition/pytorch/src/deepspeech/decoder/__init__.py
|
luluseptember/inference
|
acbc7b0bf288343ed81e62b69dea8afec03d679b
|
[
"Apache-2.0"
] | 49
|
2018-11-02T15:04:40.000Z
|
2021-11-16T18:11:39.000Z
|
others/edge/speech_recognition/pytorch/src/deepspeech/decoder/__init__.py
|
luluseptember/inference
|
acbc7b0bf288343ed81e62b69dea8afec03d679b
|
[
"Apache-2.0"
] | 6
|
2018-12-03T19:29:49.000Z
|
2020-05-16T15:34:33.000Z
|
others/edge/speech_recognition/pytorch/src/deepspeech/decoder/__init__.py
|
luluseptember/inference
|
acbc7b0bf288343ed81e62b69dea8afec03d679b
|
[
"Apache-2.0"
] | 16
|
2018-11-08T11:52:54.000Z
|
2021-11-16T18:11:28.000Z
|
from deepspeech.decoder.beam import BeamCTCDecoder # noqa: F401
from deepspeech.decoder.greedy import GreedyCTCDecoder # noqa: F401
| 46
| 68
| 0.797101
| 16
| 138
| 6.875
| 0.625
| 0.254545
| 0.381818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.050847
| 0.144928
| 138
| 2
| 69
| 69
| 0.881356
| 0.152174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1e353a6d9c62e76a583e07b3a038a38cb4fbc4a4
| 161
|
py
|
Python
|
ebdataset/__init__.py
|
tihbe/python-ebdataset
|
4d16822a3a6b45882124a8d7f7e124bd39a75868
|
[
"MIT"
] | 7
|
2020-07-30T09:31:08.000Z
|
2022-02-22T10:49:23.000Z
|
ebdataset/__init__.py
|
tihbe/python-ebdataset
|
4d16822a3a6b45882124a8d7f7e124bd39a75868
|
[
"MIT"
] | 3
|
2021-01-15T07:12:31.000Z
|
2021-10-07T02:59:32.000Z
|
ebdataset/__init__.py
|
tihbe/python-ebdataset
|
4d16822a3a6b45882124a8d7f7e124bd39a75868
|
[
"MIT"
] | 1
|
2021-03-01T13:27:06.000Z
|
2021-03-01T13:27:06.000Z
|
import ebdataset.vision as vision
import ebdataset.audio as audio
import ebdataset.generated as generated
import ebdataset.bci as bci
from .utils.units import *
| 26.833333
| 39
| 0.832298
| 24
| 161
| 5.583333
| 0.416667
| 0.447761
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124224
| 161
| 5
| 40
| 32.2
| 0.950355
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1e44fa78dcee224d8353cd2720c4dcf6fe1b8ccd
| 188
|
py
|
Python
|
tests/parsing/parser/test_str_to_ipv6.py
|
LeeeeT/valtypes
|
0c5df3cbab296bf9ca34544604fbb9496a41e01a
|
[
"MIT"
] | 3
|
2022-02-22T12:59:59.000Z
|
2022-03-10T14:12:25.000Z
|
tests/parsing/parser/test_str_to_ipv6.py
|
LeeeeT/valtypes
|
0c5df3cbab296bf9ca34544604fbb9496a41e01a
|
[
"MIT"
] | 3
|
2022-03-08T13:33:38.000Z
|
2022-03-25T03:31:56.000Z
|
tests/parsing/parser/test_str_to_ipv6.py
|
LeeeeT/valtypes
|
0c5df3cbab296bf9ca34544604fbb9496a41e01a
|
[
"MIT"
] | null | null | null |
from ipaddress import IPv6Address
from valtypes import parse
def test() -> None:
"""
It parses str to ipv6
"""
assert parse(IPv6Address, "1::2") == IPv6Address("1::2")
| 15.666667
| 60
| 0.62766
| 24
| 188
| 4.916667
| 0.708333
| 0.20339
| 0.220339
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0.234043
| 188
| 11
| 61
| 17.090909
| 0.763889
| 0.111702
| 0
| 0
| 0
| 0
| 0.05298
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1e6ff9b637a801bf6244f09e3f5b5dc7f5e975ca
| 31,074
|
py
|
Python
|
aikit/models/random_forest_addins.py
|
LionelMassoulard/aikit
|
98b2abaa3bf47ab46f2fd3c270010293de06dba9
|
[
"BSD-2-Clause"
] | null | null | null |
aikit/models/random_forest_addins.py
|
LionelMassoulard/aikit
|
98b2abaa3bf47ab46f2fd3c270010293de06dba9
|
[
"BSD-2-Clause"
] | null | null | null |
aikit/models/random_forest_addins.py
|
LionelMassoulard/aikit
|
98b2abaa3bf47ab46f2fd3c270010293de06dba9
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 1 08:59:01 2018
@author: Lionel Massoulard
"""
from sklearn.exceptions import NotFittedError
from sklearn.base import BaseEstimator, ClassifierMixin, TransformerMixin, RegressorMixin
from sklearn.linear_model import LogisticRegression, Ridge
from sklearn.decomposition import TruncatedSVD
from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor
from aikit.transformers.model_wrapper import ModelWrapper
import numpy as np
# In[]
def compute_node_norm_classification_tree(tree):
""" takes a DecisionTree Regressor and returns a value corresponding to the norm of each node, as well the coefficient of each node """
value = tree.tree_.value
ch_left = tree.tree_.children_left
ch_right = tree.tree_.children_right
nb_nodes = tree.tree_.node_count
parents = -np.ones(nb_nodes, dtype=np.int32)
nodes_index = np.arange(nb_nodes)
parents[ch_left[ch_left != -1]] = nodes_index[ch_left != -1]
parents[ch_right[ch_right != -1]] = nodes_index[ch_right != -1]
sum_v = value.sum(axis=2, keepdims=True)
proba = value / sum_v
ii = parents != -1
ii_root = parents == -1
nodes_value = np.zeros(proba.shape, dtype=np.float32)
nodes_value[ii] = proba[ii, :, :] - proba[parents[ii], :, :]
nodes_value[ii_root] = proba[ii_root, :, :]
delta_norm = (nodes_value ** 2).sum(axis=2).sum(axis=1)
nodes_norm = sum_v[:, 0, 0] * delta_norm
return nodes_norm, nodes_value
def compute_node_norm_regression_tree(tree):
""" takes a DecisionTree Classifier and returns a value corresponding to the norm of each node, as well the coefficient of each node """
ch_left = tree.tree_.children_left
ch_right = tree.tree_.children_right
value = tree.tree_.value
n_node_samples = tree.tree_.n_node_samples
nb_nodes = tree.tree_.node_count
parents = -np.ones(nb_nodes, dtype=np.int32)
nodes_index = np.arange(nb_nodes)
parents[ch_left[ch_left != -1]] = nodes_index[ch_left != -1]
parents[ch_right[ch_right != -1]] = nodes_index[ch_right != -1]
ii = parents != -1
ii_root = parents == -1
nodes_value = np.zeros(value.shape, dtype=np.float32)
nodes_value[ii] = value[ii, :, :] - value[parents[ii], :, :]
nodes_value[ii_root] = value[ii_root, :, :]
delta_norm = (nodes_value ** 2).sum(axis=2).sum(axis=1)
nodes_norm = n_node_samples * delta_norm
return nodes_norm, nodes_value
def compute_node_dept_is_leaves(tree):
""" takes a Decision Tree and returns information about each nodes : depts and if it a leaf or not """
n_nodes = tree.tree_.node_count
children_left = tree.tree_.children_left
children_right = tree.tree_.children_right
nodes_depth = np.zeros(shape=n_nodes, dtype=np.int32)
is_leaves = np.zeros(shape=n_nodes, dtype=bool)
stack = [(0, -1)] # seed is the root node id and its parent depth
while len(stack) > 0:
node_id, parent_depth = stack.pop()
nodes_depth[node_id] = parent_depth + 1
# If we have a test node
if children_left[node_id] != children_right[node_id]:
stack.append((children_left[node_id], parent_depth + 1))
stack.append((children_right[node_id], parent_depth + 1))
else:
is_leaves[node_id] = True
return nodes_depth, is_leaves
def compute_node_norm_regression_forest(forest):
all_nodes_norms = []
all_nodes_values = []
for tree in forest.estimators_:
node_norm, delta_value = compute_node_norm_regression_tree(tree)
all_nodes_norms.append(node_norm)
all_nodes_values.append(delta_value)
forest_nodes_norm = np.concatenate(all_nodes_norms, axis=0)
forest_nodes_value = np.concatenate(all_nodes_values, axis=0)
forest_nodes_value /= len(forest.estimators_)
return forest_nodes_norm, forest_nodes_value
def compute_node_norm_classification_forest(forest):
all_nodes_norms = []
all_nodes_values = []
for tree in forest.estimators_:
nodes_norm, nodes_value = compute_node_norm_classification_tree(tree)
all_nodes_norms.append(nodes_norm)
all_nodes_values.append(nodes_value)
forest_nodes_norm = np.concatenate(all_nodes_norms, axis=0)
forest_nodes_value = np.concatenate(all_nodes_values, axis=0)
forest_nodes_value /= len(forest.estimators_)
return forest_nodes_norm, forest_nodes_value
class WaveRandomForestClassifier(BaseEstimator, ClassifierMixin):
"""
RandomForest based classifier but with nodes that are removed
See Paper:
Wavelet decomposition of Random Forests
http://www.jmlr.org/papers/volume17/15-203/15-203.pdf
"""
def __init__(
self,
n_estimators=100,
criterion="gini",
max_depth=None,
min_samples_split=2,
min_samples_leaf=1,
min_weight_fraction_leaf=0.0,
max_features="auto",
max_leaf_nodes=None,
min_impurity_decrease=0.0,
min_impurity_split=None,
bootstrap=True,
oob_score=False,
n_jobs=1,
random_state=None,
verbose=0,
warm_start=False,
class_weight=None,
nodes_to_keep=0.9,
):
self.n_estimators = n_estimators
self.criterion = criterion
self.max_depth = max_depth
self.min_samples_split = min_samples_split
self.min_samples_leaf = min_samples_leaf
self.min_weight_fraction_leaf = min_weight_fraction_leaf
self.max_features = max_features
self.max_leaf_nodes = max_leaf_nodes
self.min_impurity_decrease = min_impurity_decrease
self.min_impurity_split = min_impurity_split
self.bootstrap = bootstrap
self.oob_score = oob_score
self.n_jobs = n_jobs
self.random_state = random_state
self.verbose = verbose
self.warm_start = warm_start
self.class_weight = class_weight
self.nodes_to_keep = nodes_to_keep
self.forest = None
def fit(self, X, y):
# 1) create RandomForest
self.forest = RandomForestClassifier(
n_estimators=self.n_estimators,
criterion=self.criterion,
max_depth=self.max_depth,
min_samples_split=self.min_samples_split,
min_samples_leaf=self.min_samples_leaf,
min_weight_fraction_leaf=self.min_weight_fraction_leaf,
max_features=self.max_features,
max_leaf_nodes=self.max_leaf_nodes,
min_impurity_decrease=self.min_impurity_decrease,
min_impurity_split=self.min_impurity_split,
bootstrap=self.bootstrap,
oob_score=self.oob_score,
n_jobs=self.n_jobs,
random_state=self.random_state,
verbose=self.verbose,
warm_start=self.warm_start,
class_weight=self.class_weight,
)
# 2) fit it
self.forest.fit(X, y)
self.n_outputs_ = self.forest.n_outputs_
# 3) retrieve node norms and values
self.nodes_norm, self.nodes_value = compute_node_norm_classification_forest(self.forest)
# 4) filter nodes
self._nodes_order = np.argsort(-self.nodes_norm)
if self.nodes_to_keep is not None:
if self.nodes_to_keep < 1:
nodes_to_keep = int(len(self._nodes_order) * self.nodes_to_keep)
else:
nodes_to_keep = int(self.nodes_to_keep)
self._ind_nodes_to_keep = self._nodes_order[:nodes_to_keep]
else:
self._ind_nodes_to_keep = None
return self
def _set_nodes_to_keep(self, nodes_to_keep):
""" change the number of waweletts to keep withtout refitting the underlying random forest """
self.nodes_to_keep = nodes_to_keep
if self.forest is not None:
if self.nodes_to_keep is None:
self._ind_nodes_to_keep = None
else:
if self.nodes_to_keep < 1:
nodes_to_keep = int(len(self._nodes_order) * self.nodes_to_keep)
else:
nodes_to_keep = int(self.nodes_to_keep)
self._ind_nodes_to_keep = self._nodes_order[:nodes_to_keep]
def predict_proba(self, X):
if self.forest is None:
raise NotFittedError("You should fit the model first")
path, _ = self.forest.decision_path(X)
if self._ind_nodes_to_keep is not None:
predict_proba_filtered = [
path[:, self._ind_nodes_to_keep].dot(self.nodes_value[self._ind_nodes_to_keep, n, :])
for n in range(self.nodes_value.shape[1])
]
else:
predict_proba_filtered = [
path[:, :].dot(self.nodes_value[:, n, :]) for n in range(self.nodes_value.shape[1])
]
for p in predict_proba_filtered:
p[p < 0] = 0
p[p > 1] = 1
if len(predict_proba_filtered) == 1:
return predict_proba_filtered[0]
else:
return predict_proba_filtered
@property
def classes_(self):
return self.forest.classes_
def predict(self, X):
"""Predict class for X.
The predicted class of an input sample is a vote by the trees in
the forest, weighted by their probability estimates. That is,
the predicted class is the one with highest mean probability
estimate across the trees.
Parameters
----------
X : array-like or sparse matrix of shape = [n_samples, n_features]
The input samples. Internally, its dtype will be converted to
``dtype=np.float32``. If a sparse matrix is provided, it will be
converted into a sparse ``csr_matrix``.
Returns
-------
y : array of shape = [n_samples] or [n_samples, n_outputs]
The predicted classes.
"""
# Copied from base forest
proba = self.predict_proba(X)
if self.n_outputs_ == 1:
return self.classes_.take(np.argmax(proba, axis=1), axis=0)
else:
n_samples = proba[0].shape[0]
predictions = np.zeros((n_samples, self.n_outputs_))
for k in range(self.n_outputs_):
predictions[:, k] = self.classes_[k].take(np.argmax(proba[k], axis=1), axis=0)
return predictions
def predict_log_proba(self, X):
"""Predict class log-probabilities for X.
The predicted class log-probabilities of an input sample is computed as
the log of the mean predicted class probabilities of the trees in the
forest.
Parameters
----------
X : array-like or sparse matrix of shape = [n_samples, n_features]
The input samples. Internally, its dtype will be converted to
``dtype=np.float32``. If a sparse matrix is provided, it will be
converted into a sparse ``csr_matrix``.
Returns
-------
p : array of shape = [n_samples, n_classes], or a list of n_outputs
such arrays if n_outputs > 1.
The class probabilities of the input samples. The order of the
classes corresponds to that in the attribute `classes_`.
"""
# Copied from base forest
proba = self.predict_proba(X)
if self.n_outputs_ == 1:
return np.log(proba)
else:
for k in range(self.n_outputs_):
proba[k] = np.log(proba[k])
return proba
class WaveRandomForestRegressor(BaseEstimator, RegressorMixin):
"""
RandomForest based classifier but with nodes that are removed
See Paper:
Wavelet decomposition of Random Forests
http://www.jmlr.org/papers/volume17/15-203/15-203.pdf
"""
def __init__(
self,
n_estimators=100,
criterion="mse",
max_depth=None,
min_samples_split=2,
min_samples_leaf=1,
min_weight_fraction_leaf=0.0,
max_features="auto",
max_leaf_nodes=None,
min_impurity_decrease=0.0,
min_impurity_split=None,
bootstrap=True,
oob_score=False,
n_jobs=1,
random_state=None,
verbose=0,
warm_start=False,
nodes_to_keep=0.9,
):
self.n_estimators = n_estimators
self.criterion = criterion
self.max_depth = max_depth
self.min_samples_split = min_samples_split
self.min_samples_leaf = min_samples_leaf
self.min_weight_fraction_leaf = min_weight_fraction_leaf
self.max_features = max_features
self.max_leaf_nodes = max_leaf_nodes
self.min_impurity_decrease = min_impurity_decrease
self.min_impurity_split = min_impurity_split
self.bootstrap = bootstrap
self.oob_score = oob_score
self.n_jobs = n_jobs
self.random_state = random_state
self.verbose = verbose
self.warm_start = warm_start
self.nodes_to_keep = nodes_to_keep
self.forest = None
def fit(self, X, y):
# 1) create RandomForest
self.forest = RandomForestRegressor(
n_estimators=self.n_estimators,
criterion=self.criterion,
max_depth=self.max_depth,
min_samples_split=self.min_samples_split,
min_samples_leaf=self.min_samples_leaf,
min_weight_fraction_leaf=self.min_weight_fraction_leaf,
max_features=self.max_features,
max_leaf_nodes=self.max_leaf_nodes,
min_impurity_decrease=self.min_impurity_decrease,
min_impurity_split=self.min_impurity_split,
bootstrap=self.bootstrap,
oob_score=self.oob_score,
n_jobs=self.n_jobs,
random_state=self.random_state,
verbose=self.verbose,
warm_start=self.warm_start,
)
# 2) fit it
self.forest.fit(X, y)
self.n_outputs_ = self.forest.n_outputs_
# 3) retrieve node norms and values
self.nodes_norm, self.nodes_value = compute_node_norm_regression_forest(self.forest)
# 4) filter nodes
self._nodes_order = np.argsort(-self.nodes_norm)
if self.nodes_to_keep is not None:
if self.nodes_to_keep < 1:
nodes_to_keep = int(len(self._nodes_order) * self.nodes_to_keep)
else:
nodes_to_keep = int(self.nodes_to_keep)
self._ind_nodes_to_keep = self._nodes_order[:nodes_to_keep]
else:
self._ind_nodes_to_keep = None
return self
def _set_nodes_to_keep(self, nodes_to_keep):
""" change the number of waweletts to keep withtout refitting the underlying random forest """
self.nodes_to_keep = nodes_to_keep
if self.forest is not None:
if self.nodes_to_keep is None:
self._ind_nodes_to_keep = None
else:
if self.nodes_to_keep < 1:
nodes_to_keep = int(len(self._nodes_order) * self.nodes_to_keep)
else:
nodes_to_keep = int(self.nodes_to_keep)
self._ind_nodes_to_keep = self._nodes_order[:nodes_to_keep]
def predict(self, X):
if self.forest is None:
raise NotFittedError("You should fit the model first")
path, _ = self.forest.decision_path(X)
if self._ind_nodes_to_keep is not None:
predict_proba_filtered = [
path[:, self._ind_nodes_to_keep].dot(self.nodes_value[self._ind_nodes_to_keep, n, :])
for n in range(self.nodes_value.shape[1])
]
else:
predict_proba_filtered = [
path[:, :].dot(self.nodes_value[:, n, :]) for n in range(self.nodes_value.shape[1])
]
if len(predict_proba_filtered) == 1:
return predict_proba_filtered[0][:, 0]
else:
return predict_proba_filtered
# In[]
class _RandomForestLinear(BaseEstimator, ClassifierMixin):
""" This model is a mixture of a classical RandomForest with on linear model plug after it
The idea is to fit a RandomForest and use the node as features for a linear model.
So re-optimizing globally the structure created by the RandomForest
Parameters
----------
n_estimators : int, default = 100
number of trees of the RandomForest
criterion : string, default = 'gini' or 'mse'
the splitting criterion for the RandomForest
max_deatures : string or number, default = 'auto',
the number of features per split
max_depth : int or None, default = None
the maximum depth of trees
random_state : int or None
random seed for RandomForest
other_rf_params : dict or None
additionnal parameters to be passed to the RandomForest
do_svd : boolean, default = False
if True will do an SVD before calling the linear algorithm
svd_n_components : int, default = 100
number of svd components
C : float, default = 1
linear model C parameter
"""
is_regression = None
def __init__(
self,
n_estimators=100,
criterion="gini",
max_features="auto",
max_depth=None,
random_state=None,
nodes_to_keep=None,
other_rf_params=None,
do_svd=False,
svd_n_components=100,
C=1,
):
self.n_estimators = n_estimators
self.criterion = criterion
self.max_features = max_features
self.max_depth = max_depth
self.random_state = random_state
self.do_svd = do_svd
self.svd_n_components = svd_n_components
self.nodes_to_keep = nodes_to_keep
self.other_rf_params = other_rf_params
self.C = C
def fit(self, X, y=None):
if self.is_regression:
rf_klass = RandomForestRegressor
lin_klass = Ridge
kwargs = {"alpha": self.C}
else:
rf_klass = RandomForestClassifier
lin_klass = LogisticRegression
kwargs = {"C": self.C}
if self.other_rf_params is None:
other_rf_params = {}
else:
other_rf_params = self.other_rf_params
self.forest = rf_klass(
n_estimators=self.n_estimators,
criterion=self.criterion,
max_features=self.max_features,
max_depth=self.max_depth,
random_state=self.random_state,
**other_rf_params
)
self.forest.fit(X, y)
Xnode_onehot, _ = self.forest.decision_path(X)
# Filter of Nodes ?
if self.nodes_to_keep is not None:
if self.is_regression:
nodes_norm, nodes_value = compute_node_norm_regression_forest(self.forest)
else:
nodes_norm, nodes_value = compute_node_norm_regression_forest(self.forest)
nodes_order = np.argsort(-nodes_norm)
if self.nodes_to_keep < 1:
nodes_to_keep = int(len(nodes_order) * self.nodes_to_keep)
else:
nodes_to_keep = int(self.nodes_to_keep)
self._ind_nodes_to_keep = nodes_order[:nodes_to_keep]
Xnode_onehot = Xnode_onehot[:, self._ind_nodes_to_keep]
else:
self._ind_nodes_to_keep = None
if self.do_svd:
self.svd = TruncatedSVD(n_components=100)
Xsvd = self.svd.fit_transform(Xnode_onehot)
else:
Xsvd = Xnode_onehot
self.linear = lin_klass(**kwargs)
self.linear.fit(Xsvd, y)
return self
def predict(self, X):
Xnode_onehot, _ = self.forest.decision_path(X)
if self._ind_nodes_to_keep is not None:
Xnode_onehot = Xnode_onehot[:, self._ind_nodes_to_keep]
if self.do_svd:
Xsvd = self.svd.transform(Xnode_onehot)
else:
Xsvd = Xnode_onehot
return self.linear.predict(Xsvd)
class RandomForestLogit(_RandomForestLinear):
__doc__ = _RandomForestLinear.__doc__
is_regression = False
@property
def classes_(self):
return self.linear.classes_
def predict_proba(self, X):
Xnode_onehot, _ = self.forest.decision_path(X)
if self._ind_nodes_to_keep is not None:
Xnode_onehot = Xnode_onehot[:, self._ind_nodes_to_keep]
if self.do_svd:
Xsvd = self.svd.transform(Xnode_onehot)
else:
Xsvd = Xnode_onehot
return self.linear.predict_proba(Xsvd)
def predict_log_proba(self, X):
Xnode_onehot, _ = self.forest.decision_path(X)
if self._ind_nodes_to_keep is not None:
Xnode_onehot = Xnode_onehot[:, self._ind_nodes_to_keep]
if self.do_svd:
Xsvd = self.svd.transform(Xnode_onehot)
else:
Xsvd = Xnode_onehot
return self.linear.predict_log_proba(Xsvd)
class RandomForestRidge(_RandomForestLinear):
__doc__ = _RandomForestLinear.__doc__
is_regression = True
def __init__(
self,
n_estimators=100,
criterion="mse", # change default argument
max_features="auto",
max_depth=None,
random_state=None,
nodes_to_keep=None,
other_rf_params=None,
do_svd=False,
svd_n_components=100,
C=1,
):
self.n_estimators = n_estimators
self.criterion = criterion
self.max_features = max_features
self.max_depth = max_depth
self.random_state = random_state
self.nodes_to_keep = nodes_to_keep
self.do_svd = do_svd
self.svd_n_components = svd_n_components
self.other_rf_params = other_rf_params
self.C = C
# In[]
# In[]
class _RandomForestTransformerAbstract(BaseEstimator, TransformerMixin):
""" This model is a transforms a classical RandomForest into a transformer by returning not the prediction but the nodes.
The process is the following :
1. fit a RandomForest
2. get the node dummy variable (using decision path)
3. (optional) filter some of the nodes
4. (optional) apply an SVD
It can be useful to
* craft non-linear features that can be given to a linear algorithm
* create a 'supervised' clustering algorithm
* create a similarity between observations based on their nodes
* ...
Parameters
----------
n_estimators : int, default = 100
number of trees of the RandomForest
criterion : string, default = 'gini' or 'mse'
the splitting criterion for the RandomForest
max_deatures : string or number, default = 'auto',
the number of features per split
max_depth : int or None, default = None
the maximum depth of trees
random_state : int or None
random seed for RandomForest
nodes_to_keep : int, float or None
number of nodes to keep in result (filter by their norm), if None no filter, if float < 1 taken as a percentage of the total number of nodes
other_rf_params : dict or None
additionnal parameters to be passed to the RandomForest
do_svd : boolean, default = False
if True will do an SVD before calling the linear algorithm
svd_n_components : int, default = 100
number of svd components
"""
is_regression = None
def __init__(
self,
n_estimators=100,
criterion="gini",
max_features="auto",
max_depth=None,
random_state=None,
nodes_to_keep=None,
other_rf_params=None,
do_svd=False,
svd_n_components=100,
):
self.n_estimators = n_estimators
self.criterion = criterion
self.max_features = max_features
self.max_depth = max_depth
self.random_state = random_state
self.nodes_to_keep = nodes_to_keep
self.do_svd = do_svd
self.svd_n_components = svd_n_components
self.other_rf_params = other_rf_params
def fit(self, X, y):
self._fit_transform(X, y, do_fit=True, do_transform=False)
return self
def transform(self, X):
Xres = self._fit_transform(X, y=None, do_fit=False, do_transform=True)
return Xres
def fit_transform(self, X, y):
Xres = self._fit_transform(X, y, do_fit=True, do_transform=True)
return Xres
def _fit_transform(self, X, y, do_fit, do_transform):
if do_fit:
if self.other_rf_params is None:
other_rf_params = {}
else:
other_rf_params = self.other_rf_params
if self.is_regression:
rf_klass = RandomForestRegressor
else:
rf_klass = RandomForestClassifier
## 1) create RF and fit it
self.forest = rf_klass(
n_estimators=self.n_estimators,
criterion=self.criterion,
max_features=self.max_features,
max_depth=self.max_depth,
random_state=self.random_state,
**other_rf_params
)
self.forest.fit(X, y)
## 2) retrieve node id
Xnode_onehot, _ = self.forest.decision_path(X)
### 3) filter nodes
if do_fit:
if self.nodes_to_keep is not None:
if self.is_regression:
nodes_norm, nodes_value = compute_node_norm_regression_forest(self.forest)
else:
nodes_norm, nodes_value = compute_node_norm_regression_forest(self.forest)
nodes_order = np.argsort(-nodes_norm)
if self.nodes_to_keep < 1:
nodes_to_keep = int(len(nodes_order) * self.nodes_to_keep)
else:
nodes_to_keep = int(self.nodes_to_keep)
self._ind_nodes_to_keep = nodes_order[:nodes_to_keep]
else:
self._ind_nodes_to_keep = None
if self._ind_nodes_to_keep is not None:
Xnode_onehot = Xnode_onehot[:, self._ind_nodes_to_keep]
if self.do_svd:
if do_fit:
self.svd = TruncatedSVD(n_components=self.svd_n_components)
Xsvd = self.svd.fit_transform(Xnode_onehot)
else:
Xsvd = self.svd.transform(Xnode_onehot)
else:
Xsvd = Xnode_onehot
if do_fit:
if self.do_svd:
self._features_names = ["RFNODE_SVD_%d" % i for i in range(Xsvd.shape[1])]
else:
self._features_names = ["RFNODE_%d" % i for i in range(Xsvd.shape[1])]
if do_transform:
return Xsvd
else:
return self
def get_feature_names(self):
return self._features_names
class _RandomForestClassifierTransformer(_RandomForestTransformerAbstract):
__doc__ = _RandomForestTransformerAbstract.__doc__
is_regression = False
class _RandomForestRegressorTransformer(_RandomForestTransformerAbstract):
__doc__ = _RandomForestTransformerAbstract.__doc__
is_regression = True
class RandomForestClassifierTransformer(ModelWrapper):
__doc__ = _RandomForestTransformerAbstract.__doc__
def __init__(
self,
n_estimators=100,
criterion="gini",
max_features="auto",
max_depth=None,
random_state=None,
nodes_to_keep=None,
do_svd=False,
svd_n_components=100,
other_rf_params=None,
columns_to_use=None,
desired_output_type=None,
):
self.n_estimators = n_estimators
self.criterion = criterion
self.max_features = max_features
self.max_depth = max_depth
self.random_state = random_state
self.nodes_to_keep = nodes_to_keep
self.do_svd = do_svd
self.svd_n_components = svd_n_components
self.other_rf_params = other_rf_params
self.columns_to_use = columns_to_use
self.desired_output_type = desired_output_type
super(RandomForestClassifierTransformer, self).__init__(
columns_to_use=columns_to_use,
regex_match=False,
work_on_one_column_only=False,
all_columns_at_once=True,
accepted_input_types=None,
column_prefix=None,
desired_output_type=desired_output_type,
must_transform_to_get_features_name=False,
dont_change_columns=False,
)
def _get_model(self, X, y=None):
return _RandomForestClassifierTransformer(
n_estimators=self.n_estimators,
criterion=self.criterion,
max_features=self.max_features,
random_state=self.random_state,
nodes_to_keep=self.nodes_to_keep,
do_svd=self.do_svd,
svd_n_components=self.svd_n_components,
other_rf_params=self.other_rf_params,
)
class RandomForestRegressorTransformer(ModelWrapper):
__doc__ = _RandomForestTransformerAbstract.__doc__
def __init__(
self,
n_estimators=100,
criterion="mse",
max_features="auto",
max_depth=None,
random_state=None,
nodes_to_keep=None,
do_svd=False,
svd_n_components=100,
other_rf_params=None,
columns_to_use=None,
desired_output_type=None,
):
self.n_estimators = n_estimators
self.criterion = criterion
self.max_features = max_features
self.max_depth = max_depth
self.random_state = random_state
self.nodes_to_keep = nodes_to_keep
self.do_svd = do_svd
self.svd_n_components = svd_n_components
self.other_rf_params = other_rf_params
self.columns_to_use = columns_to_use
self.desired_output_type = desired_output_type
super(RandomForestRegressorTransformer, self).__init__(
columns_to_use=columns_to_use,
regex_match=False,
work_on_one_column_only=False,
all_columns_at_once=True,
accepted_input_types=None,
column_prefix=None,
desired_output_type=desired_output_type,
must_transform_to_get_features_name=False,
dont_change_columns=False,
)
def _get_model(self, X, y=None):
return _RandomForestRegressorTransformer(
n_estimators=self.n_estimators,
criterion=self.criterion,
max_features=self.max_features,
random_state=self.random_state,
nodes_to_keep=self.nodes_to_keep,
do_svd=self.do_svd,
svd_n_components=self.svd_n_components,
other_rf_params=self.other_rf_params,
)
| 30.857994
| 148
| 0.629948
| 3,955
| 31,074
| 4.616941
| 0.089507
| 0.03483
| 0.062651
| 0.030394
| 0.801479
| 0.771468
| 0.729628
| 0.712486
| 0.709255
| 0.699671
| 0
| 0.00917
| 0.294619
| 31,074
| 1,006
| 149
| 30.888668
| 0.823897
| 0.163159
| 0
| 0.816587
| 0
| 0
| 0.005575
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0.011164
| 0.007974
| 0.145136
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1e79a0ecc10621a8f2e165c3a44af2b3c5a1e3e6
| 10,472
|
py
|
Python
|
function/sample_staging.py
|
the-fool/gcp-auto-net-tagger
|
552ec87e933df5714daba2fd2f5addd5dde48bff
|
[
"Apache-2.0"
] | null | null | null |
function/sample_staging.py
|
the-fool/gcp-auto-net-tagger
|
552ec87e933df5714daba2fd2f5addd5dde48bff
|
[
"Apache-2.0"
] | null | null | null |
function/sample_staging.py
|
the-fool/gcp-auto-net-tagger
|
552ec87e933df5714daba2fd2f5addd5dde48bff
|
[
"Apache-2.0"
] | 1
|
2021-08-20T21:39:42.000Z
|
2021-08-20T21:39:42.000Z
|
sample = {
"asset": {
"ancestors": [
"projects/163454223397",
"organizations/673763744309"
],
"assetType": "compute.googleapis.com/Instance",
"name": "//compute.googleapis.com/projects/sc-vice-test/zones/us-central1-a/instances/instance-1",
"resource": {
"data": {
"allocationAffinity": {
"consumeAllocationType": "ANY_ALLOCATION"
},
"canIpForward": False,
"confidentialInstanceConfig": {
"enableConfidentialCompute": False
},
"cpuPlatform": "Unknown CPU Platform",
"creationTimestamp": "2021-04-22T13:51:49.576-07:00",
"deletionProtection": False,
"description": "",
"disks": [
{
"autoDelete": True,
"boot": True,
"deviceName": "instance-1",
"diskSizeGb": "10",
"guestOsFeatures": [
{
"type": "UEFI_COMPATIBLE"
},
{
"type": "VIRTIO_SCSI_MULTIQUEUE"
}
],
"index": 0,
"interface": "SCSI",
"licenses": [
"https://www.googleapis.com/compute/v1/projects/debian-cloud/global/licenses/debian-10-buster"
],
"mode": "READ_WRITE",
"source": "https://www.googleapis.com/compute/v1/projects/sc-vice-test/zones/us-central1-a/disks/instance-1",
"type": "PERSISTENT"
}
],
"displayDevice": {
"enableDisplay": False
},
"fingerprint": "kklxPt7MzL8=",
"id": "4486036186437803787",
"labelFingerprint": "42WmSpB8rSM=",
"machineType": "https://www.googleapis.com/compute/v1/projects/sc-vice-test/zones/us-central1-a/machineTypes/e2-medium",
"name": "instance-1",
"networkInterfaces": [
{
"accessConfigs": [
{
"name": "External NAT",
"natIP": "108.59.84.233",
"networkTier": "PREMIUM",
"type": "ONE_TO_ONE_NAT"
}
],
"fingerprint": "3XxnerGjaPY=",
"name": "nic0",
"network": "https://www.googleapis.com/compute/v1/projects/sc-vice-test/global/networks/default",
"networkIP": "10.128.0.2",
"subnetwork": "https://www.googleapis.com/compute/v1/projects/sc-vice-test/regions/us-central1/subnetworks/default"
}
],
"scheduling": {
"automaticRestart": True,
"onHostMaintenance": "MIGRATE",
"preemptible": False
},
"selfLink": "https://www.googleapis.com/compute/v1/projects/sc-vice-test/zones/us-central1-a/instances/instance-1",
"serviceAccounts": [
{
"email": "163454223397-compute@developer.gserviceaccount.com",
"scopes": [
"https://www.googleapis.com/auth/devstorage.read_only",
"https://www.googleapis.com/auth/logging.write",
"https://www.googleapis.com/auth/monitoring.write",
"https://www.googleapis.com/auth/servicecontrol",
"https://www.googleapis.com/auth/service.management.readonly",
"https://www.googleapis.com/auth/trace.append"
]
}
],
"shieldedInstanceConfig": {
"enableIntegrityMonitoring": True,
"enableSecureBoot": False,
"enableVtpm": True
},
"shieldedInstanceIntegrityPolicy": {
"updateAutoLearnPolicy": True
},
"startRestricted": False,
"status": "STAGING",
"tags": {
"fingerprint": "42WmSpB8rSM="
},
"zone": "https://www.googleapis.com/compute/v1/projects/sc-vice-test/zones/us-central1-a"
},
"discoveryDocumentUri": "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest",
"discoveryName": "Instance",
"location": "us-central1-a",
"parent": "//cloudresourcemanager.googleapis.com/projects/163454223397",
"version": "v1"
},
"updateTime": "2021-04-22T20:51:50.801629Z"
},
"priorAsset": {
"ancestors": [
"projects/163454223397",
"organizations/673763744309"
],
"assetType": "compute.googleapis.com/Instance",
"name": "//compute.googleapis.com/projects/sc-vice-test/zones/us-central1-a/instances/instance-1",
"resource": {
"data": {
"allocationAffinity": {
"consumeAllocationType": "ANY_ALLOCATION"
},
"canIpForward": False,
"confidentialInstanceConfig": {
"enableConfidentialCompute": False
},
"cpuPlatform": "Unknown CPU Platform",
"creationTimestamp": "2021-04-22T13:51:49.576-07:00",
"deletionProtection": False,
"description": "",
"disks": [
{
"autoDelete": True,
"boot": True,
"deviceName": "instance-1",
"diskSizeGb": "10",
"guestOsFeatures": [
{
"type": "UEFI_COMPATIBLE"
},
{
"type": "VIRTIO_SCSI_MULTIQUEUE"
}
],
"index": 0,
"interface": "SCSI",
"licenses": [
"https://www.googleapis.com/compute/v1/projects/debian-cloud/global/licenses/debian-10-buster"
],
"mode": "READ_WRITE",
"source": "https://www.googleapis.com/compute/v1/projects/sc-vice-test/zones/us-central1-a/disks/instance-1",
"type": "PERSISTENT"
}
],
"displayDevice": {
"enableDisplay": False
},
"fingerprint": "IbogiVywfFU=",
"id": "4486036186437803787",
"labelFingerprint": "42WmSpB8rSM=",
"machineType": "https://www.googleapis.com/compute/v1/projects/sc-vice-test/zones/us-central1-a/machineTypes/e2-medium",
"name": "instance-1",
"networkInterfaces": [
{
"accessConfigs": [
{
"name": "External NAT",
"networkTier": "PREMIUM",
"type": "ONE_TO_ONE_NAT"
}
],
"fingerprint": "bQWv9c5Re9E=",
"name": "nic0",
"network": "https://www.googleapis.com/compute/v1/projects/sc-vice-test/global/networks/default",
"subnetwork": "https://www.googleapis.com/compute/v1/projects/sc-vice-test/regions/us-central1/subnetworks/default"
}
],
"scheduling": {
"automaticRestart": True,
"onHostMaintenance": "MIGRATE",
"preemptible": False
},
"selfLink": "https://www.googleapis.com/compute/v1/projects/sc-vice-test/zones/us-central1-a/instances/instance-1",
"serviceAccounts": [
{
"email": "163454223397-compute@developer.gserviceaccount.com",
"scopes": [
"https://www.googleapis.com/auth/devstorage.read_only",
"https://www.googleapis.com/auth/logging.write",
"https://www.googleapis.com/auth/monitoring.write",
"https://www.googleapis.com/auth/servicecontrol",
"https://www.googleapis.com/auth/service.management.readonly",
"https://www.googleapis.com/auth/trace.append"
]
}
],
"shieldedInstanceConfig": {
"enableIntegrityMonitoring": True,
"enableSecureBoot": False,
"enableVtpm": True
},
"shieldedInstanceIntegrityPolicy": {
"updateAutoLearnPolicy": True
},
"startRestricted": False,
"status": "PROVISIONING",
"tags": {
"fingerprint": "42WmSpB8rSM="
},
"zone": "https://www.googleapis.com/compute/v1/projects/sc-vice-test/zones/us-central1-a"
},
"discoveryDocumentUri": "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest",
"discoveryName": "Instance",
"location": "us-central1-a",
"parent": "//cloudresourcemanager.googleapis.com/projects/163454223397",
"version": "v1"
},
"updateTime": "2021-04-22T20:51:49.759449Z"
},
"priorAssetState": "PRESENT",
"window": {
"startTime": "2021-04-22T20:51:50.801629Z"
}
}
| 46.336283
| 139
| 0.428571
| 679
| 10,472
| 6.583211
| 0.247423
| 0.098881
| 0.112752
| 0.131544
| 0.963982
| 0.963982
| 0.957047
| 0.957047
| 0.93736
| 0.93736
| 0
| 0.056705
| 0.445951
| 10,472
| 226
| 140
| 46.336283
| 0.713719
| 0
| 0
| 0.716814
| 0
| 0.070796
| 0.460708
| 0.102454
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.035398
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ebaeaadfe6d2340ce936de711d3092235223f7e
| 4,825
|
py
|
Python
|
tests/test_examples.py
|
wilsonify/tensorflow-examples
|
2271c666b33c7a74047c7196783ab04e9aee8362
|
[
"MIT"
] | 2
|
2019-11-21T02:43:24.000Z
|
2020-08-12T04:48:39.000Z
|
tests/test_examples.py
|
wilsonify/tensorflow-examples
|
2271c666b33c7a74047c7196783ab04e9aee8362
|
[
"MIT"
] | null | null | null |
tests/test_examples.py
|
wilsonify/tensorflow-examples
|
2271c666b33c7a74047c7196783ab04e9aee8362
|
[
"MIT"
] | 1
|
2021-02-06T12:36:58.000Z
|
2021-02-06T12:36:58.000Z
|
import pytest
from tensorflow_examples.examples import (
convolutional_neural_networks,
distributed_tensorflow,
queues_threads,
text_and_visualizations,
up_and_running,
word_embeddings_and_rnns
)
from tensorflow_examples.examples.convolutional_neural_networks import cifar_cnn, mnist_cnn
def test_smoke():
print("is anything on fire")
@pytest.mark.skip(reason='not implemented yet')
def test_distribute():
distributed_tensorflow.distribute()
@pytest.mark.skip(reason='not implemented yet')
def test_distribute_run():
distributed_tensorflow.distribute_run()
@pytest.mark.skip(reason='not implemented yet')
def test_queue_basic():
queues_threads.queue_basic()
@pytest.mark.skip(reason='not implemented yet')
def test_tfrecords_end_to_end():
queues_threads.tfrecords_end_to_end()
@pytest.mark.skip(reason='not implemented yet')
def test_tfrecords_read_write():
queues_threads.tfrecords_read_write()
@pytest.mark.skip(reason='not implemented yet')
def test_BasicRNNCell():
text_and_visualizations.BasicRNNCell()
@pytest.mark.skip(reason='not implemented yet')
def test_LSTM_supervised_embeddings():
text_and_visualizations.LSTM_supervised_embeddings()
@pytest.mark.skip(reason='not implemented yet')
def test_scan_example():
text_and_visualizations.scan_example()
@pytest.mark.skip(reason='not implemented yet')
def test_vanilla_rnn_with_tfboard():
text_and_visualizations.vanilla_rnn_with_tfboard()
@pytest.mark.skip(reason='not implemented yet')
def test_softmax():
up_and_running.softmax()
@pytest.mark.skip(reason='not implemented yet')
def test_GRU_pretrained_GloVe():
word_embeddings_and_rnns.GRU_pretrained_GloVe()
@pytest.mark.skip(reason='not implemented yet')
def test_word2vec():
word_embeddings_and_rnns.word2vec()
@pytest.mark.skip(reason='not implemented yet')
def test_build_second_net(cifar_data_manager):
cifar_data_manager.build_second_net()
@pytest.mark.skip(reason='not implemented yet')
def test_create_cifar_image():
convolutional_neural_networks.cifar_cnn.CifarDataManager.create_cifar_image()
@pytest.mark.skip(reason='not implemented yet')
def test_display_cifar():
convolutional_neural_networks.cifar_cnn.CifarDataManager.display_cifar()
@pytest.mark.skip(reason='not implemented yet')
def test_one_hot():
convolutional_neural_networks.cifar_cnn.CifarDataManager.one_hot()
@pytest.mark.skip(reason='not implemented yet')
def test_run_simple_net():
convolutional_neural_networks.cifar_cnn.CifarDataManager.run_simple_net()
@pytest.mark.skip(reason='not implemented yet')
def test_unpickle():
convolutional_neural_networks.cifar_cnn.CifarDataManager.unpickle()
@pytest.mark.skip(reason='not implemented yet')
def test_mnist_cnn():
convolutional_neural_networks.mnist_cnn()
@pytest.mark.skip(reason='not implemented yet')
def test_distribute():
distributed_tensorflow.distribute()
@pytest.mark.skip(reason='not implemented yet')
def test_distribute_run():
distributed_tensorflow.distribute_run()
@pytest.mark.skip(reason='not implemented yet')
def test_queue_basic():
queues_threads.queue_basic()
@pytest.mark.skip(reason='not implemented yet')
def test_tfrecords_end_to_end():
queues_threads.tfrecords_end_to_end()
@pytest.mark.skip(reason='not implemented yet')
def test_tfrecords_read_write():
queues_threads.tfrecords_read_write()
@pytest.mark.skip(reason='not implemented yet')
def test_BasicRNNCell():
text_and_visualizations.BasicRNNCell()
@pytest.mark.skip(reason='not implemented yet')
def test_LSTM_supervised_embeddings():
text_and_visualizations.LSTM_supervised_embeddings()
@pytest.mark.skip(reason='not implemented yet')
def test_get_sentence_batch():
text_and_visualizations.LSTM_supervised_embeddings.get_sentence_batch()
@pytest.mark.skip(reason='not implemented yet')
def test_scan_example():
text_and_visualizations.scan_example()
@pytest.mark.skip(reason='not implemented yet')
def test_vanilla_rnn_with_tfboard():
text_and_visualizations.vanilla_rnn_with_tfboard()
@pytest.mark.skip(reason='not implemented yet')
def test_hello_world_main_1():
from tensorflow_examples.examples.up_and_running import hello_world
up_and_running.hello_world.main_1()
@pytest.mark.skip(reason='not implemented yet')
def test_hello_world_main_2():
from tensorflow_examples.examples.up_and_running import hello_world
hello_world.main_2()
@pytest.mark.skip(reason='not implemented yet')
def test_softmax():
up_and_running.softmax()
@pytest.mark.skip(reason='not implemented yet')
def test_GRU_pretrained_GloVe():
word_embeddings_and_rnns.GRU_pretrained_GloVe()
@pytest.mark.skip(reason='not implemented yet')
def test_word2vec():
word_embeddings_and_rnns.word2vec()
| 25.802139
| 91
| 0.795233
| 646
| 4,825
| 5.597523
| 0.123839
| 0.067754
| 0.131637
| 0.188053
| 0.846239
| 0.846239
| 0.763274
| 0.763274
| 0.763274
| 0.702434
| 0
| 0.00184
| 0.099067
| 4,825
| 186
| 92
| 25.94086
| 0.829998
| 0
| 0
| 0.724138
| 0
| 0
| 0.137824
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.301724
| false
| 0
| 0.043103
| 0
| 0.344828
| 0.008621
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1ebfb5d5a4e7c7b6fd9f3f44b8153a8b24152e43
| 20,933
|
py
|
Python
|
port/modules/font/dvsm_21.py
|
diskman88/mpython-desktop-robot
|
01cd15fbeeba521ab874cf66f94d3909c4f8c39a
|
[
"MIT"
] | 53
|
2018-10-15T12:01:24.000Z
|
2019-11-22T09:31:02.000Z
|
port/modules/font/dvsm_21.py
|
diskman88/mpython-desktop-robot
|
01cd15fbeeba521ab874cf66f94d3909c4f8c39a
|
[
"MIT"
] | 10
|
2018-10-17T13:42:19.000Z
|
2019-11-25T06:42:40.000Z
|
port/modules/font/dvsm_21.py
|
diskman88/mpython-desktop-robot
|
01cd15fbeeba521ab874cf66f94d3909c4f8c39a
|
[
"MIT"
] | 26
|
2018-12-04T03:53:39.000Z
|
2019-11-22T03:40:05.000Z
|
# Code generated by font-to-py.py.
# Font: dsm.ttf
version = '0.26'
def height():
return 21
def max_width():
return 12
def hmap():
return True
def reverse():
return False
def monospaced():
return False
def min_ch():
return 32
def max_ch():
return 126
_font =\
b'\x0c\x00\x00\x00\x7c\x00\xfe\x00\x87\x00\x03\x00\x03\x00\x07\x00'\
b'\x0e\x00\x1c\x00\x38\x00\x30\x00\x30\x00\x30\x00\x00\x00\x30\x00'\
b'\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\x00\x00\x00\x00\xc0\x00\xc0\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\xcc\x00\xcc\x00\xcc\x00\xcc\x00'\
b'\xcc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x06\x60\x04\x40\x0c\xc0\x0c\xc0\x7f\xf0\x7f\xf0'\
b'\x08\x80\x19\x80\x19\x80\xff\xe0\xff\xe0\x33\x00\x33\x00\x22\x00'\
b'\x22\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x08\x00\x08\x00\x3e\x00\x7f\x00\xe9\x00\xc8\x00\xc8\x00\x68\x00'\
b'\x3e\x00\x0b\x00\x09\x80\x09\x80\x8b\x80\xff\x00\x7e\x00\x08\x00'\
b'\x08\x00\x08\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x78\x00\xcc\x00'\
b'\xcc\x00\xcc\x00\xcc\x00\x78\xc0\x03\x00\x06\x00\x18\x00\x63\xc0'\
b'\x06\x60\x06\x60\x06\x60\x06\x60\x03\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x1f\x00\x3f\x00\x30\x00\x30\x00'\
b'\x30\x00\x18\x00\x18\x00\x7c\x00\x6e\x60\xc6\x60\xc3\x60\xc3\xc0'\
b'\xe1\x80\x7e\xc0\x3c\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x30\x00\x20\x00\x60\x00\x60\x00\x40\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x60\x00\x60\x00\x60\x00'\
b'\x20\x00\x30\x00\x00\x00\x00\x00\x0c\x00\x00\x00\xc0\x00\x40\x00'\
b'\x60\x00\x60\x00\x60\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00'\
b'\x30\x00\x30\x00\x30\x00\x60\x00\x60\x00\x60\x00\x40\x00\xc0\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x08\x00\x08\x00\x88\x80\x6b\x00'\
b'\x1c\x00\x1c\x00\x6b\x00\x88\x80\x08\x00\x08\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x0c\x00'\
b'\x0c\x00\x0c\x00\xff\xc0\xff\xc0\x0c\x00\x0c\x00\x0c\x00\x0c\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x60\x00\x60\x00\x60\x00\x60\x00'\
b'\xc0\x00\xc0\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf8\x00\xf8\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x00\xc0\x01\x80\x01\x80\x03\x00\x03\x00\x06\x00'\
b'\x06\x00\x0c\x00\x0c\x00\x18\x00\x18\x00\x30\x00\x30\x00\x60\x00'\
b'\x60\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x3c\x00\x7f\x00\x63\x00\xe3\x80\xc1\x80\xc1\x80\xcd\x80\xcd\x80'\
b'\xc1\x80\xc1\x80\xc1\x80\xe3\x80\x63\x00\x7f\x00\x3c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x38\x00\xf8\x00'\
b'\xd8\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00'\
b'\x18\x00\x18\x00\x18\x00\xff\x00\xff\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x7e\x00\xff\x00\x83\x80\x01\x80'\
b'\x01\x80\x01\x80\x03\x80\x03\x00\x06\x00\x0c\x00\x18\x00\x30\x00'\
b'\x60\x00\xff\x80\xff\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x7e\x00\xff\x00\x83\x80\x01\x80\x01\x80\x03\x80'\
b'\x1f\x00\x1e\x00\x03\x00\x01\x80\x01\x80\x01\x80\x83\x80\xff\x00'\
b'\x7e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x07\x00\x07\x00\x0f\x00\x0b\x00\x1b\x00\x13\x00\x33\x00\x63\x00'\
b'\x63\x00\xc3\x00\xff\xc0\xff\xc0\x03\x00\x03\x00\x03\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x7f\x00\x7f\x00'\
b'\x60\x00\x60\x00\x60\x00\x7e\x00\x7f\x00\x43\x80\x01\x80\x01\x80'\
b'\x01\x80\x01\x80\x83\x00\xff\x00\x7c\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x1e\x00\x3f\x00\x71\x00\x60\x00'\
b'\xc0\x00\xc0\x00\xde\x00\xff\x00\xe3\x80\xc1\x80\xc1\x80\xc1\x80'\
b'\x63\x80\x7f\x00\x3e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\xff\x80\xff\x80\x03\x00\x03\x00\x03\x00\x06\x00'\
b'\x06\x00\x06\x00\x0c\x00\x0c\x00\x0c\x00\x18\x00\x18\x00\x18\x00'\
b'\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x3e\x00\x7f\x00\xe3\x80\xc1\x80\xc1\x80\x63\x00\x3e\x00\x7f\x00'\
b'\x63\x00\xc1\x80\xc1\x80\xc1\x80\xe3\x80\x7f\x00\x3e\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x3e\x00\x7f\x00'\
b'\xe3\x00\xc1\x80\xc1\x80\xc1\x80\xe3\x80\x7f\x80\x3d\x80\x01\x80'\
b'\x01\x80\x03\x00\x47\x00\x7e\x00\x3c\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xc0\x00\xc0\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x60\x00'\
b'\x60\x00\x60\x00\x00\x00\x00\x00\x00\x00\x00\x00\x60\x00\x60\x00'\
b'\x60\x00\x60\x00\xc0\x00\xc0\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x03\xc0\x0f\x00\x3c\x00'\
b'\xe0\x00\xe0\x00\x3c\x00\x0f\x00\x03\xc0\x00\x40\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\xff\xc0\xff\xc0\x00\x00\x00\x00'\
b'\xff\xc0\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x80\x00\xf0\x00\x3c\x00\x0f\x00\x01\xc0\x01\xc0\x0f\x00\x3c\x00'\
b'\xf0\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x7c\x00\xfe\x00\x87\x00\x03\x00\x03\x00\x07\x00'\
b'\x0e\x00\x1c\x00\x38\x00\x30\x00\x30\x00\x30\x00\x00\x00\x30\x00'\
b'\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x00\x00\x0f\x00\x31\x80\x60\xc0\x60\xc0\x47\xc0\xc4\xc0\xcc\xc0'\
b'\xcc\xc0\xcc\xc0\xcc\xc0\xcc\xc0\xc4\xc0\x67\xc0\x60\x00\x30\x00'\
b'\x38\x00\x0f\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x0c\x00\x0c\x00'\
b'\x1e\x00\x1e\x00\x1e\x00\x3f\x00\x33\x00\x33\x00\x33\x00\x73\x80'\
b'\x7f\x80\x7f\x80\x61\x80\xc0\xc0\xc0\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\xfe\x00\xff\x00\xc3\x80\xc1\x80'\
b'\xc1\x80\xc3\x80\xff\x00\xff\x00\xc1\x80\xc0\xc0\xc0\xc0\xc0\xc0'\
b'\xc1\xc0\xff\x80\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x0f\x80\x3f\xc0\x70\x40\x60\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x60\x00\x70\x40\x3f\xc0'\
b'\x0f\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\xfc\x00\xff\x00\xc3\x80\xc1\x80\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0'\
b'\xc0\xc0\xc0\xc0\xc0\xc0\xc1\x80\xc3\x80\xff\x00\xfc\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\xff\xc0\xff\xc0'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xff\xc0\xff\xc0\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xff\xc0\xff\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\xff\xc0\xff\xc0\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xff\x80\xff\x80\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x1f\x00\x3f\x80\x70\x80\x60\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc3\xc0\xc3\xc0\xc0\xc0\xc0\xc0\x60\xc0\x70\xc0\x3f\xc0'\
b'\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xff\xc0\xff\xc0'\
b'\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\xff\x00\xff\x00'\
b'\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00'\
b'\x18\x00\x18\x00\x18\x00\xff\x00\xff\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x3f\x00\x3f\x00\x03\x00\x03\x00'\
b'\x03\x00\x03\x00\x03\x00\x03\x00\x03\x00\x03\x00\x03\x00\x03\x00'\
b'\x87\x00\xfe\x00\x7c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\xc0\xc0\xc1\x80\xc3\x00\xc6\x00\xcc\x00\xd8\x00'\
b'\xf8\x00\xfc\x00\xec\x00\xc6\x00\xc7\x00\xc3\x00\xc1\x80\xc1\xc0'\
b'\xc0\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xff\xc0\xff\xc0\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\xe1\xc0\xe1\xc0'\
b'\xe1\xc0\xf3\xc0\xd2\xc0\xd2\xc0\xde\xc0\xcc\xc0\xcc\xc0\xcc\xc0'\
b'\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\xe0\xc0\xe0\xc0\xf0\xc0\xf0\xc0'\
b'\xd8\xc0\xd8\xc0\xc8\xc0\xcc\xc0\xc4\xc0\xc6\xc0\xc6\xc0\xc3\xc0'\
b'\xc3\xc0\xc1\xc0\xc1\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x1e\x00\x7f\x80\x61\x80\xe1\xc0\xc0\xc0\xc0\xc0'\
b'\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xe1\xc0\x61\x80\x7f\x80'\
b'\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\xff\x00\xff\x80\xc1\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc1\xc0\xff\x80'\
b'\xff\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x1e\x00\x7f\x80'\
b'\x61\x80\xe1\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0'\
b'\xc0\xc0\xe1\xc0\x61\x80\x3f\x00\x1e\x00\x03\x00\x01\x80\x01\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\xff\x00\xff\x80\xc1\xc0\xc0\xc0'\
b'\xc0\xc0\xc1\xc0\xff\x80\xff\x00\xc3\x80\xc1\x80\xc1\xc0\xc0\xc0'\
b'\xc0\xe0\xc0\x60\xc0\x70\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x3f\x00\x7f\x80\xe0\x80\xc0\x00\xc0\x00\xe0\x00'\
b'\x7c\x00\x3f\x00\x03\x80\x00\xc0\x00\xc0\x00\xc0\x81\xc0\xff\x80'\
b'\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\xff\xf0\xff\xf0\x06\x00\x06\x00\x06\x00\x06\x00\x06\x00\x06\x00'\
b'\x06\x00\x06\x00\x06\x00\x06\x00\x06\x00\x06\x00\x06\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\xc0\xc0\xc0\xc0'\
b'\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0\xc0'\
b'\xc0\xc0\xc0\xc0\xe1\xc0\x7f\x80\x3f\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\xc0\xc0\xc0\xc0\x61\x80\x61\x80'\
b'\x61\x80\x61\x80\x33\x00\x33\x00\x33\x00\x3f\x00\x1e\x00\x1e\x00'\
b'\x1e\x00\x0c\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\xc0\x30\xc0\x30\xc0\x30\x60\x60\x66\x60\x66\x60'\
b'\x6f\x60\x6f\x60\x69\x60\x69\x60\x39\xc0\x39\xc0\x39\xc0\x30\xc0'\
b'\x30\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\xe1\xc0\x61\x80\x73\x80\x33\x00\x1e\x00\x1e\x00\x0c\x00\x0c\x00'\
b'\x1e\x00\x1e\x00\x37\x00\x33\x00\x63\x80\x61\x80\xc1\xc0\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\xe0\x70\x60\x60'\
b'\x30\xc0\x30\xc0\x19\x80\x1f\x80\x0f\x00\x06\x00\x06\x00\x06\x00'\
b'\x06\x00\x06\x00\x06\x00\x06\x00\x06\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\xff\xc0\xff\xc0\x01\x80\x03\x80'\
b'\x03\x00\x06\x00\x0e\x00\x0c\x00\x1c\x00\x18\x00\x30\x00\x70\x00'\
b'\x60\x00\xff\xc0\xff\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\xf0\x00\xf0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xf0\x00\xf0\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\xc0\x00\x60\x00\x60\x00\x30\x00\x30\x00\x18\x00\x18\x00\x0c\x00'\
b'\x0c\x00\x06\x00\x06\x00\x03\x00\x03\x00\x01\x80\x01\x80\x00\xc0'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\xf0\x00\xf0\x00'\
b'\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00'\
b'\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\x30\x00\xf0\x00\xf0\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x0e\x00\x1b\x00\x31\x80\x60\xc0'\
b'\xc0\x60\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xff\xf0\xff\xf0\x00\x00\x00\x00\x00\x00\x0c\x00\xc0\x00'\
b'\x60\x00\x30\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3e\x00\x7f\x00\x43\x80\x01\x80\x3f\x80\x7f\x80'\
b'\xe1\x80\xc1\x80\xc3\x80\xff\x80\x3d\x80\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xde\x00\xff\x00\xe3\x00\xc1\x80\xc1\x80\xc1\x80\xc1\x80\xc1\x80'\
b'\xe3\x00\xff\x00\xde\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1e\x00\x7f\x00'\
b'\x61\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x61\x00\x7f\x00'\
b'\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x01\x80\x01\x80\x01\x80\x01\x80\x3d\x80\x7f\x80\x63\x80\xc1\x80'\
b'\xc1\x80\xc1\x80\xc1\x80\xc1\x80\x63\x80\x7f\x80\x3d\x80\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x1e\x00\x7f\x00\x63\x80\xc1\x80\xff\x80\xff\x80'\
b'\xc0\x00\xc0\x00\x60\x80\x7f\x80\x1f\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x0f\x80\x1f\x80\x18\x00\x18\x00'\
b'\xff\x80\xff\x80\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00'\
b'\x18\x00\x18\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3d\x80\x7f\x80'\
b'\x63\x80\xc1\x80\xc1\x80\xc1\x80\xc1\x80\xc1\x80\x63\x80\x7f\x80'\
b'\x3d\x80\x01\x80\x43\x80\x7f\x00\x3e\x00\x00\x00\x0c\x00\x00\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xce\x00\xff\x00\xe3\x80\xc1\x80'\
b'\xc1\x80\xc1\x80\xc1\x80\xc1\x80\xc1\x80\xc1\x80\xc1\x80\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x0c\x00\x0c\x00'\
b'\x00\x00\x00\x00\x7c\x00\x7c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00'\
b'\x0c\x00\x0c\x00\x0c\x00\xff\xc0\xff\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x0c\x00\x0c\x00\x00\x00\x00\x00'\
b'\x7c\x00\x7c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00'\
b'\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\xf8\x00\xf0\x00\x00\x00'\
b'\x0c\x00\x00\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc3\x80\xc7\x00'\
b'\xce\x00\xdc\x00\xf8\x00\xf8\x00\xec\x00\xce\x00\xc6\x00\xc3\x00'\
b'\xc3\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\xfc\x00\xfc\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00'\
b'\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0e\x00\x07\xc0\x03\xc0\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\xdb\x80\xff\xc0\xcc\xc0\xcc\xc0\xcc\xc0\xcc\xc0'\
b'\xcc\xc0\xcc\xc0\xcc\xc0\xcc\xc0\xcc\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xce\x00\xff\x00\xe3\x80\xc1\x80\xc1\x80\xc1\x80\xc1\x80\xc1\x80'\
b'\xc1\x80\xc1\x80\xc1\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3e\x00\x7f\x00'\
b'\x63\x00\xc1\x80\xc1\x80\xc1\x80\xc1\x80\xc1\x80\x63\x00\x7f\x00'\
b'\x3e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\xde\x00\xff\x00\xe3\x00\xc1\x80'\
b'\xc1\x80\xc1\x80\xc1\x80\xc1\x80\xe3\x00\xff\x00\xde\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x3d\x80\x7f\x80\x63\x80\xc1\x80\xc1\x80\xc1\x80'\
b'\xc1\x80\xc1\x80\x63\x80\x7f\x80\x3d\x80\x01\x80\x01\x80\x01\x80'\
b'\x01\x80\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xce\x00\xdf\x00\xf1\x00\xe0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3f\x00\x7f\x80'\
b'\xc0\x80\xc0\x00\xfe\x00\x3f\x00\x03\x80\x01\x80\x83\x80\xff\x00'\
b'\x7e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x00\x00\x18\x00\x18\x00\x18\x00\xff\x80\xff\x80\x18\x00\x18\x00'\
b'\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x1f\x80\x0f\x80\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\xc1\x80\xc1\x80\xc1\x80\xc1\x80\xc1\x80\xc1\x80'\
b'\xc1\x80\xc1\x80\xe3\x80\x7f\x80\x39\x80\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xc1\x80\xe3\x80\x63\x00\x63\x00\x77\x00\x36\x00\x36\x00\x36\x00'\
b'\x1c\x00\x1c\x00\x1c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc0\x30\xc0\x30'\
b'\x60\x60\x66\x60\x66\x60\x66\x60\x3f\xc0\x39\xc0\x39\xc0\x39\xc0'\
b'\x30\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\xe3\x80\x63\x00\x36\x00\x3e\x00'\
b'\x1c\x00\x1c\x00\x1c\x00\x3e\x00\x36\x00\x63\x00\xe3\x80\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\xc1\x80\x63\x00\x63\x00\x63\x00\x36\x00\x36\x00'\
b'\x3e\x00\x1c\x00\x1c\x00\x0c\x00\x18\x00\x18\x00\x18\x00\x70\x00'\
b'\x70\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\xff\x80\xff\x80\x07\x00\x06\x00\x0e\x00\x1c\x00\x38\x00\x30\x00'\
b'\x60\x00\xff\x80\xff\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0c\x00\x00\x00\x0f\x00\x1f\x00\x18\x00\x18\x00\x18\x00\x18\x00'\
b'\x18\x00\x18\x00\xf0\x00\xf0\x00\x38\x00\x18\x00\x18\x00\x18\x00'\
b'\x18\x00\x18\x00\x1f\x00\x0f\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x0c\x00\x00\x00\xf0\x00\xf8\x00'\
b'\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x0f\x00\x0f\x00'\
b'\x1c\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\xf8\x00\xf0\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x78\x40\xff\xc0\x87\x80\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
_index =\
b'\x00\x00\x2c\x00\x2c\x00\x58\x00\x58\x00\x84\x00\x84\x00\xb0\x00'\
b'\xb0\x00\xdc\x00\xdc\x00\x08\x01\x08\x01\x34\x01\x34\x01\x60\x01'\
b'\x60\x01\x8c\x01\x8c\x01\xb8\x01\xb8\x01\xe4\x01\xe4\x01\x10\x02'\
b'\x10\x02\x3c\x02\x3c\x02\x68\x02\x68\x02\x94\x02\x94\x02\xc0\x02'\
b'\xc0\x02\xec\x02\xec\x02\x18\x03\x18\x03\x44\x03\x44\x03\x70\x03'\
b'\x70\x03\x9c\x03\x9c\x03\xc8\x03\xc8\x03\xf4\x03\xf4\x03\x20\x04'\
b'\x20\x04\x4c\x04\x4c\x04\x78\x04\x78\x04\xa4\x04\xa4\x04\xd0\x04'\
b'\xd0\x04\xfc\x04\xfc\x04\x28\x05\x28\x05\x54\x05\x54\x05\x80\x05'\
b'\x80\x05\xac\x05\xac\x05\xd8\x05\xd8\x05\x04\x06\x04\x06\x30\x06'\
b'\x30\x06\x5c\x06\x5c\x06\x88\x06\x88\x06\xb4\x06\xb4\x06\xe0\x06'\
b'\xe0\x06\x0c\x07\x0c\x07\x38\x07\x38\x07\x64\x07\x64\x07\x90\x07'\
b'\x90\x07\xbc\x07\xbc\x07\xe8\x07\xe8\x07\x14\x08\x14\x08\x40\x08'\
b'\x40\x08\x6c\x08\x6c\x08\x98\x08\x98\x08\xc4\x08\xc4\x08\xf0\x08'\
b'\xf0\x08\x1c\x09\x1c\x09\x48\x09\x48\x09\x74\x09\x74\x09\xa0\x09'\
b'\xa0\x09\xcc\x09\xcc\x09\xf8\x09\xf8\x09\x24\x0a\x24\x0a\x50\x0a'\
b'\x50\x0a\x7c\x0a\x7c\x0a\xa8\x0a\xa8\x0a\xd4\x0a\xd4\x0a\x00\x0b'\
b'\x00\x0b\x2c\x0b\x2c\x0b\x58\x0b\x58\x0b\x84\x0b\x84\x0b\xb0\x0b'\
b'\xb0\x0b\xdc\x0b\xdc\x0b\x08\x0c\x08\x0c\x34\x0c\x34\x0c\x60\x0c'\
b'\x60\x0c\x8c\x0c\x8c\x0c\xb8\x0c\xb8\x0c\xe4\x0c\xe4\x0c\x10\x0d'\
b'\x10\x0d\x3c\x0d\x3c\x0d\x68\x0d\x68\x0d\x94\x0d\x94\x0d\xc0\x0d'\
b'\xc0\x0d\xec\x0d\xec\x0d\x18\x0e\x18\x0e\x44\x0e\x44\x0e\x70\x0e'\
b'\x70\x0e\x9c\x0e\x9c\x0e\xc8\x0e\xc8\x0e\xf4\x0e\xf4\x0e\x20\x0f'\
b'\x20\x0f\x4c\x0f\x4c\x0f\x78\x0f\x78\x0f\xa4\x0f\xa4\x0f\xd0\x0f'\
b'\xd0\x0f\xfc\x0f\xfc\x0f\x28\x10\x28\x10\x54\x10\x54\x10\x80\x10'\
_mvfont = memoryview(_font)
def get_ch(ch):
    """Return (glyph, height, width) for character *ch*.

    glyph is a memoryview into the font bitmap (no copy); height is the
    fixed font height (21 px); width is the glyph's pixel width, read from
    the two-byte little-endian prefix of its record.

    Characters outside printable ASCII (32..126) fall back to '?' (63).
    NOTE(review): the +1 shift presumably selects the second entry of the
    paired (start, end) index records emitted by the font generator —
    confirm against the generator if the index layout changes.
    """
    code = ord(ch)
    if 32 <= code <= 126:
        code += 1
    else:
        code = 63
    base = (code - 32) * 4
    start = int.from_bytes(_index[base:base + 2], 'little')
    end = int.from_bytes(_index[base + 2:base + 4], 'little')
    width = int.from_bytes(_font[start:start + 2], 'little')
    return _mvfont[start + 2:end], 21, width
| 63.62614
| 78
| 0.696317
| 5,009
| 20,933
| 2.90557
| 0.034937
| 0.584582
| 0.714855
| 0.74866
| 0.795589
| 0.758005
| 0.711763
| 0.685172
| 0.636663
| 0.612065
| 0
| 0.387193
| 0.037644
| 20,933
| 328
| 79
| 63.820122
| 0.335269
| 0.002197
| 0
| 0.171975
| 1
| 0.917197
| 0.897743
| 0.896673
| 0
| 1
| 0
| 0
| 0
| 1
| 0.025478
| false
| 0
| 0
| 0.022293
| 0.050955
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
94ddee63eee94addcc86274304c15055c55dffb5
| 111
|
py
|
Python
|
Src/Plugins/Tests.Security/TestData/SecurePlaintextSecrets/Inputs/SEC101_005.SlackApiKey.py
|
microsoft/spam
|
025e147cb8deb8cfeeebe8a839c183e9b016b51d
|
[
"MIT"
] | 24
|
2020-12-29T17:31:31.000Z
|
2022-03-25T15:18:07.000Z
|
Src/Plugins/Tests.Security/TestData/SecurePlaintextSecrets/Inputs/SEC101_005.SlackApiKey.py
|
microsoft/spam
|
025e147cb8deb8cfeeebe8a839c183e9b016b51d
|
[
"MIT"
] | 113
|
2020-11-06T09:42:43.000Z
|
2022-02-15T23:29:36.000Z
|
Src/Plugins/Tests.Security/TestData/SecurePlaintextSecrets/Inputs/SEC101_005.SlackApiKey.py
|
microsoft/spam
|
025e147cb8deb8cfeeebe8a839c183e9b016b51d
|
[
"MIT"
] | 11
|
2020-12-29T16:05:36.000Z
|
2021-12-10T19:19:31.000Z
|
xoxb-83112120353-1016171244646-sGMxuWapBw3w3qdK6OfTjORe
dead-83112120353-1016171244646-sGMxuWapBw3w3qdK6OfTjORf
| 55.5
| 55
| 0.936937
| 8
| 111
| 13
| 0.75
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.490909
| 0.009009
| 111
| 2
| 56
| 55.5
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bfce1faea22500832ebbb3c0c563b6eba4f60548
| 74,283
|
py
|
Python
|
venv/lib/python3.6/site-packages/ansible_collections/cisco/ios/plugins/modules/ios_ospfv3.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 1
|
2020-01-22T13:11:23.000Z
|
2020-01-22T13:11:23.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/cisco/ios/plugins/modules/ios_ospfv3.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 12
|
2020-02-21T07:24:52.000Z
|
2020-04-14T09:54:32.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/cisco/ios/plugins/modules/ios_ospfv3.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
#
# -*- coding: utf-8 -*-
# Copyright 2020 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#############################################
# WARNING #
#############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
#############################################
"""
The module file for ios_ospfv3
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
module: ios_ospfv3
short_description: OSPFv3 resource module
description: This module configures and manages the Open Shortest Path First (OSPF)
version 3 on IOS platforms.
version_added: 1.1.0
author: Sumit Jaiswal (@justjais)
notes:
- Tested against Cisco IOSv Version 15.2 on VIRL.
- This module works with connection C(network_cli).
See U(https://docs.ansible.com/ansible/latest/network/user_guide/platform_ios.html)
options:
config:
description: A list of configurations for ospfv3.
type: dict
suboptions:
processes:
description: List of OSPF instance configurations.
type: list
elements: dict
suboptions:
process_id:
description: Process ID
required: true
type: int
address_family:
description: Enter Address Family command mode
type: list
elements: dict
suboptions:
afi:
description: Enter Address Family command mode
type: str
choices:
- ipv4
- ipv6
unicast:
description: Address Family modifier
type: bool
vrf:
description: Specify parameters for a VPN Routing/Forwarding instance
type: str
adjacency:
description: Control adjacency formation
type: dict
suboptions:
min_adjacency:
description:
- Initial number of adjacencies allowed to be forming in an area
- Please refer vendor documentation for valid values
type: int
none:
description: No initial
type: bool
max_adjacency:
description:
- Maximum number of adjacencies allowed to be forming
- Please refer vendor documentation for valid values
type: int
disable:
description: Disable adjacency staggering
type: bool
areas:
description: OSPF area parameters
type: list
elements: dict
suboptions:
area_id:
description:
- OSPF area ID as a decimal value. Please refer vendor documentation
of Valid values.
- OSPF area ID in IP address format(e.g. A.B.C.D)
type: str
authentication:
description: Authentication parameters
type: dict
suboptions:
key_chain:
description: Use a key-chain for cryptographic authentication keys
type: str
'null':
description: Use no authentication
type: bool
default_cost:
description:
- Set the summary default-cost of a NSSA/stub area
- Stub's advertised external route metric
- Note, please refer vendor documentation for respective valid values
type: int
filter_list:
description: Filter networks between OSPFv3 areas
type: list
elements: dict
suboptions:
name:
description: Name of an IP prefix-list
type: str
direction:
description: The direction to apply on the filter networks sent to and from this area.
type: str
choices: ['in', 'out']
required: True
normal:
description: Specify a normal area type
type: bool
nssa:
description: Specify a NSSA area
type: dict
suboptions:
set:
description: Enable a NSSA area
type: bool
default_information_originate:
description: Originate Type 7 default into NSSA area
type: dict
suboptions:
metric:
description: OSPF default metric
type: int
metric_type:
description:
- OSPF metric type for default routes
- OSPF Link State type
type: int
choices: [1, 2]
nssa_only:
description: Limit default advertisement to this NSSA area
type: bool
no_redistribution:
description: No redistribution into this NSSA area
type: bool
no_summary:
description: Do not send summary LSA into NSSA
type: bool
translate:
description:
- Translate LSA
- Always translate LSAs on this ABR
- Suppress forwarding address in translated LSAs
type: str
choices: ['always', 'suppress-fa']
ranges:
description: Summarize routes matching address/mask (border routers only)
type: list
elements: dict
suboptions:
address:
description: IP address to match
type: str
netmask:
description: IP mask for address
type: str
advertise:
description:
- Advertise this range (default)
- Since, advertise when enabled is not shown in running-config
idempotency won't be maintained for the play in the second or
next run of the play.
type: bool
cost:
description: User specified metric for this range
type: int
not_advertise:
description: DoNotAdvertise this range
type: bool
sham_link:
description: Define a sham link and its parameters
type: dict
suboptions:
source:
description: IPv6 address associated with sham-link source (X:X:X:X::X)
type: str
destination:
description: IPv6 address associated with sham-link destination (X:X:X:X::X)
type: str
authentication:
description: Authentication parameters
type: dict
suboptions:
key_chain:
description: Use a key-chain for cryptographic authentication keys
type: str
'null':
description: Use no authentication
type: bool
cost:
description:
- Associate a cost with the sham-link
- Cost of the sham-link
type: int
ttl_security:
description:
- TTL security check
- maximum number of hops allowed
type: int
stub:
description:
- Specify a stub area
- Backbone can not be configured as stub area
type: dict
suboptions:
set:
description: Enable a stub area
type: bool
no_summary:
description: Do not send summary LSA into stub area
type: bool
authentication:
description:
- Authentication parameters
- Authentication operation mode
type: dict
suboptions:
deployment:
description: Deployment mode of operation
type: bool
normal:
description: Normal mode of operation
type: bool
auto_cost:
description: Calculate OSPF interface cost according to bandwidth
type: dict
suboptions:
set:
description: Enable OSPF auto-cost
type: bool
reference_bandwidth:
description:
- Use reference bandwidth method to assign OSPF cost
- Note, refer vendor documentation for respective valid values
type: int
bfd:
description: BFD configuration commands
type: dict
suboptions:
all_interfaces:
description: Enable BFD on all interfaces
type: bool
disable:
description: Disable BFD on all interfaces
type: bool
capability:
description:
- Enable a specific feature
- Do not perform PE specific checks
type: bool
compatible:
description: OSPFv3 router compatibility list
type: dict
suboptions:
rfc1583:
description: compatible with RFC 1583
type: bool
rfc1587:
description: compatible with RFC 1587
type: bool
rfc5243:
description: supports DBD exchange optimization
type: bool
default_information:
description: Control distribution of default information
type: dict
suboptions:
originate:
description: Distribute a default route
type: bool
always:
description: Always advertise default route
type: bool
metric:
description:
- OSPF default metric
- Note, refer vendor documentation for respective valid values
type: int
metric_type:
description:
- OSPF metric type for default routes
- Note, please refer vendor documentation for respective valid range
type: int
route_map:
description: Route-map reference name
type: str
default_metric:
description: Set metric of redistributed routes
type: int
discard_route:
description: Enable or disable discard-route installation
type: dict
suboptions:
sham_link:
description: Discard route for sham-link routes
type: bool
external:
description: Discard route for summarised redistributed routes
type: bool
internal:
description: Discard route for summarised inter-area routes
type: bool
distance:
description:
- Define an administrative distance
- Note, please refer vendor documentation for respective valid range
type: int
distribute_list:
description: Filter networks in routing updates
type: dict
suboptions:
acls:
description: IP access list
type: list
elements: dict
suboptions:
name:
description: IP access list name/number
type: str
required: true
direction:
description: Filter incoming and outgoing routing updates.
type: str
required: true
choices: ['in', 'out']
interface:
description:
- Interface configuration (GigabitEthernet A/B)
- Valid with incoming traffic
type: str
protocol:
description:
- Protocol config (bgp 1).
- Valid with outgoing traffic
type: str
prefix:
description: Filter prefixes in routing updates
type: dict
suboptions:
name:
description: Name of an IP prefix-list
type: str
required: true
gateway_name:
description: Gateway name for filtering incoming updates based on gateway
type: str
direction:
description: Filter incoming and outgoing routing updates.
type: str
required: true
choices: ['in', 'out']
interface:
description:
- Interface configuration (GigabitEthernet A/B)
- Valid with incoming traffic
type: str
protocol:
description:
- Protocol config (bgp 1).
- Valid with outgoing traffic
type: str
route_map:
description: Filter prefixes in routing updates
type: dict
suboptions:
name:
description: Route-map name
type: str
required: true
event_log:
description: Event Logging
type: dict
suboptions:
enable:
description: Enable event Logging
type: bool
one_shot:
description: Disable Logging When Log Buffer Becomes Full
type: bool
pause:
description: Pause Event Logging
type: bool
size:
description:
- Maximum Number of Events Stored in the Event Log
- Note, refer vendor documentation for respective valid values
type: int
graceful_restart:
description:
- Graceful-restart options
- helper support
type: dict
suboptions:
enable:
description: helper support enabled
type: bool
disable:
description: disable helper support
type: bool
strict_lsa_checking:
description: enable helper strict LSA checking
type: bool
interface_id:
description: Source of the interface ID
type: dict
suboptions:
ios_if_index:
description: IOS interface number
type: bool
snmp_if_index:
description: SNMP MIB ifIndex
type: bool
limit:
description: Limit a specific OSPF feature
type: dict
suboptions:
dc:
description: Demand circuit retransmissions
type: dict
suboptions:
number:
description: The maximum number of retransmissions
type: int
disable:
description: Disable the feature
type: bool
non_dc:
description: Non-demand-circuit retransmissions
type: dict
suboptions:
number:
description: The maximum number of retransmissions
type: int
disable:
description: Disable the feature
type: bool
local_rib_criteria:
description: Enable or disable usage of local RIB as route criteria
type: dict
suboptions:
enable:
description: Enable usage of local RIB as route criteria
type: bool
forwarding_address:
description: Local RIB used to validate external/NSSA forwarding addresses
type: bool
inter_area_summary:
description: Local RIB used as criteria for inter-area summaries
type: bool
nssa_translation:
description: Local RIB used as criteria for NSSA translation
type: bool
log_adjacency_changes:
description: Log changes in adjacency state
type: dict
suboptions:
set:
description: Log changes in adjacency state
type: bool
detail:
description: Log all state changes
type: bool
manet:
description: Specify MANET OSPF parameters
type: dict
suboptions:
cache:
description: Specify MANET cache sizes
type: dict
suboptions:
acknowledgement:
description:
- Specify MANET acknowledgement cache size
- Maximum number of acknowledgements in cache
type: int
update:
description:
- Specify MANET LSA cache size
- Maximum number of LSAs in cache
type: int
hello:
description: Unicast Hellos rather than multicast
type: dict
suboptions:
multicast:
description: Multicast Hello requests and responses rather than unicast
type: bool
unicast:
description: Unicast Hello requests and responses rather than multicast
type: bool
peering:
description: MANET OSPF Smart Peering
type: dict
suboptions:
set:
description: Enable selective peering
type: bool
disable:
description: Disable selective peering
type: bool
per_interface:
description: Select peers per interface rather than per node
type: bool
redundancy:
description:
- Redundant paths
- Number of redundant OSPF paths
type: int
willingness:
description: Specify and Relay willingness value
type: int
max_lsa:
description: Maximum number of non self-generated LSAs to accept
type: dict
suboptions:
number:
description:
- Maximum number of non self-generated LSAs to accept
- Note, refer vendor documentation for respective valid values
type: int
threshold_value:
description:
- Threshold value (%) at which to generate a warning msg
- Note, refer vendor documentation for respective valid values
type: int
ignore_count:
description:
- Maximum number of times adjacencies can be suppressed
- Note, refer vendor documentation for respective valid values
type: int
ignore_time:
description:
- Number of minutes during which all adjacencies are suppressed
- Note, refer vendor documentation for respective valid values
type: int
reset_time:
description:
- Number of minutes after which ignore-count is reset to zero
- Note, refer vendor documentation for respective valid values
type: int
warning_only:
description: Only give a warning message when limit is exceeded
type: bool
max_metric:
description:
- Set maximum metric
- Maximum metric in self-originated router-LSAs
type: dict
suboptions:
disable:
description: disable maximum metric in self-originated router-LSAs
type: bool
external_lsa:
description:
- Override external-lsa metric with max-metric value
- Overriding metric in external-LSAs
- Note, refer vendor documentation for respective valid values
type: int
inter_area_lsas:
description:
- Override inter-area-lsas metric with max-metric value
- Overriding metric in inter-area-LSAs
- Note, refer vendor documentation for respective valid values
type: int
on_startup:
description: Set maximum metric temporarily after reboot
type: dict
suboptions:
time:
description:
- Time, in seconds, router-LSAs are originated with max-metric
- Note, please refer vendor documentation for respective valid range
type: int
wait_for_bgp:
description: Let BGP decide when to originate router-LSA with normal metric
type: bool
stub_prefix_lsa:
description: Set maximum metric for stub links in prefix LSAs
type: bool
maximum_paths:
description:
- Forward packets over multiple paths
- Number of paths
type: int
passive_interface:
description: Suppress routing updates on an interface
type: str
prefix_suppression:
description: Prefix suppression
type: dict
suboptions:
enable:
description: Enable prefix suppression
type: bool
disable:
description: Disable prefix suppression
type: bool
queue_depth:
description: Hello/Router process queue depth
type: dict
suboptions:
hello:
description: OSPF Hello process queue depth
type: dict
suboptions:
max_packets:
description: maximum number of packets in the queue
type: int
unlimited:
description: Unlimited queue depth
type: bool
update:
description: OSPF Router process queue depth
type: dict
suboptions:
max_packets:
description: maximum number of packets in the queue
type: int
unlimited:
description: Unlimited queue depth
type: bool
router_id:
description:
- Router-id address for this OSPF process
- OSPF router-id in IP address format (A.B.C.D)
type: str
shutdown:
description: Shutdown the router process
type: dict
suboptions:
enable:
description: Shutdown the router process
type: bool
disable:
description: Disable Shutdown
type: bool
summary_prefix:
description: Configure IP address summaries
type: dict
suboptions:
address:
description:
- IP summary address (A.B.C.D)
- IP prefix <network>/<length> (A.B.C.D/nn)
type: str
mask:
description: IP Summary mask
type: str
not_advertise:
description: Do not advertise or translate
type: bool
nssa_only:
description: Limit summary to NSSA areas
type: bool
tag:
description: Set tag
type: int
timers:
description: Adjust routing timers
type: dict
suboptions:
lsa:
description:
- OSPF LSA timers, arrival timer
- The minimum interval in milliseconds between accepting the same LSA
- Note, refer vendor documentation for respective valid values
type: int
manet:
description: OSPF MANET timers
type: dict
suboptions:
cache:
description: Specify MANET cache sizes
type: dict
suboptions:
acknowledgement:
description: Specify MANET acknowledgement cache size
type: int
redundancy:
description: Specify MANET LSA cache size
type: int
hello:
description:
- Unicast Hellos rather than multicast
- Unicast Hello requests and responses rather than multicast
type: bool
peering:
description: MANET OSPF Smart Peering
type: dict
suboptions:
set:
description: Enable selective peering
type: bool
per_interface:
description: Select peers per interface rather than per node
type: bool
redundancy:
description:
- Redundant paths
- Number of redundant OSPF paths
type: int
willingness:
description: Specify and Relay willingness value
type: int
pacing:
description: OSPF pacing timers
type: dict
suboptions:
flood:
description:
- OSPF flood pacing timer
- The minimum interval in msec to pace limit flooding on interface
- Note, refer vendor documentation for respective valid values
type: int
lsa_group:
description:
- OSPF LSA group pacing timer
- Interval in sec between group of LSA being refreshed or maxaged
- Note, refer vendor documentation for respective valid values
type: int
retransmission:
description:
- OSPF retransmission pacing timer
- The minimum interval in msec between neighbor retransmissions
- Note, refer vendor documentation for respective valid values
type: int
throttle:
description: OSPF throttle timers
type: dict
suboptions:
lsa:
description: OSPF LSA throttle timers
type: dict
suboptions:
first_delay:
description:
- Delay to generate first occurrence of LSA in milliseconds
- Note, refer vendor documentation for respective valid values
type: int
min_delay:
description:
- Minimum delay between originating the same LSA in milliseconds
- Note, refer vendor documentation for respective valid values
type: int
max_delay:
description:
- Maximum delay between originating the same LSA in milliseconds
- Note, refer vendor documentation for respective valid values
type: int
spf:
description:
- OSPF SPF throttle timers
- Delay between receiving a change to SPF calculation in milliseconds
- Note, refer vendor documentation for respective valid values
type: dict
suboptions:
receive_delay:
description:
- Delay between receiving a change to SPF calculation in milliseconds
- Note, refer vendor documentation for respective valid values
type: int
between_delay:
description:
- Delay between first and second SPF calculation in milliseconds
- Note, refer vendor documentation for respective valid values
type: int
max_delay:
description:
- Maximum wait time in milliseconds for SPF calculations
- Note, refer vendor documentation for respective valid values
type: int
adjacency:
description: Control adjacency formation
type: dict
suboptions:
min_adjacency:
description:
- Initial number of adjacencies allowed to be forming in an area
- Please refer vendor documentation for valid values
type: int
max_adjacency:
description:
- Maximum number of adjacencies allowed to be forming
- Please refer vendor documentation for valid values
type: int
none:
description: No initial
type: bool
areas:
description: OSPF area parameters
type: list
elements: dict
suboptions:
area_id:
description:
- OSPF area ID as a decimal value. Please refer vendor documentation
of Valid values.
- OSPF area ID in IP address format(e.g. A.B.C.D)
type: str
authentication:
description: Authentication parameters
type: dict
suboptions:
key_chain:
description: Use a key-chain for cryptographic authentication keys
type: str
ipsec:
description: Use IPsec authentication
type: dict
suboptions:
spi:
description: Set the SPI (Security Parameters Index)
type: int
md5:
description: Use MD5 authentication
type: int
sha1:
description: Use SHA-1 authentication
type: int
hex_string:
description: SHA-1 key (40 chars)
type: str
default_cost:
description:
- Set the summary default-cost of a NSSA/stub area
- Stub's advertised external route metric
- Note, please refer vendor documentation for respective valid values
type: int
nssa:
description: Specify a NSSA area
type: dict
suboptions:
set:
description: Enable a NSSA area
type: bool
default_information_originate:
description: Originate Type 7 default into NSSA area
type: dict
suboptions:
metric:
description: OSPF default metric
type: int
metric_type:
description:
- OSPF metric type for default routes
- OSPF Link State type
type: int
choices: [1, 2]
nssa_only:
description: Limit default advertisement to this NSSA area
type: bool
no_redistribution:
description: No redistribution into this NSSA area
type: bool
no_summary:
description: Do not send summary LSA into NSSA
type: bool
translate:
description:
- Translate LSA
- Always translate LSAs on this ABR
- Suppress forwarding address in translated LSAs
type: str
choices: ['always', 'suppress-fa']
stub:
description:
- Specify a stub area
- Backbone can not be configured as stub area
type: dict
suboptions:
set:
description: Enable a stub area
type: bool
no_summary:
description: Do not send summary LSA into stub area
type: bool
authentication:
description:
- Authentication parameter mode
- Deployment mode of operation
type: bool
auto_cost:
description: Calculate OSPF interface cost according to bandwidth
type: dict
suboptions:
set:
description: Enable OSPF auto-cost
type: bool
reference_bandwidth:
description:
- Use reference bandwidth method to assign OSPF cost
- Note, refer vendor documentation for respective valid values
type: int
bfd:
description:
- BFD configuration commands
- Enable BFD on all interfaces
type: bool
compatible:
description: OSPFv3 router compatibility list
type: dict
suboptions:
rfc1583:
description: compatible with RFC 1583
type: bool
rfc1587:
description: compatible with RFC 1587
type: bool
rfc5243:
description: supports DBD exchange optimization
type: bool
event_log:
description: Event Logging
type: dict
suboptions:
enable:
description: Enable event Logging
type: bool
one_shot:
description: Disable Logging When Log Buffer Becomes Full
type: bool
pause:
description: Pause Event Logging
type: bool
size:
description:
- Maximum Number of Events Stored in the Event Log
- Note, refer vendor documentation for respective valid values
type: int
graceful_restart:
description: Graceful-restart options for helper support
type: dict
suboptions:
disable:
description: disable helper support
type: bool
strict_lsa_checking:
description: enable helper strict LSA checking
type: bool
help:
description: Description of the interactive help system
type: bool
interface_id:
description:
- Source of the interface ID
- SNMP MIB ifIndex
type: bool
limit:
description: Limit a specific OSPF feature and LS update, DBD, and LS request retransmissions
type: dict
suboptions:
dc:
description: Demand circuit retransmissions
type: dict
suboptions:
number:
description: The maximum number of retransmissions
type: int
disable:
description: Disable the feature
type: bool
non_dc:
description: Non-demand-circuit retransmissions
type: dict
suboptions:
number:
description: The maximum number of retransmissions
type: int
disable:
description: Disable the feature
type: bool
local_rib_criteria:
description: Enable or disable usage of local RIB as route criteria
type: dict
suboptions:
enable:
description: Enable usage of local RIB as route criteria
type: bool
forwarding_address:
description: Local RIB used to validate external/NSSA forwarding addresses
type: bool
inter_area_summary:
description: Local RIB used as criteria for inter-area summaries
type: bool
nssa_translation:
description: Local RIB used as criteria for NSSA translation
type: bool
log_adjacency_changes:
description: Log changes in adjacency state
type: dict
suboptions:
set:
description: Log changes in adjacency state
type: bool
detail:
description: Log all state changes
type: bool
manet:
description: Specify MANET OSPF parameters
type: dict
suboptions:
cache:
description: Specify MANET cache sizes
type: dict
suboptions:
acknowledgement:
description: Specify MANET acknowledgement cache size
type: int
redundancy:
description: Specify MANET LSA cache size
type: int
hello:
description:
- Unicast Hellos rather than multicast
- Unicast Hello requests and responses rather than multicast
type: bool
peering:
description: MANET OSPF Smart Peering
type: dict
suboptions:
set:
description: Enable selective peering
type: bool
per_interface:
description: Select peers per interface rather than per node
type: bool
redundancy:
description:
- Redundant paths
- Number of redundant OSPF paths
type: int
willingness:
description: Specify and Relay willingness value
type: int
max_lsa:
description: Maximum number of non self-generated LSAs to accept
type: dict
suboptions:
number:
description:
- Maximum number of non self-generated LSAs to accept
- Note, refer vendor documentation for respective valid values
type: int
threshold_value:
description:
- Threshold value (%) at which to generate a warning msg
- Note, refer vendor documentation for respective valid values
type: int
ignore_count:
description:
- Maximum number of times adjacencies can be suppressed
- Note, refer vendor documentation for respective valid values
type: int
ignore_time:
description:
- Number of minutes during which all adjacencies are suppressed
- Note, refer vendor documentation for respective valid values
type: int
reset_time:
description:
- Number of minutes after which ignore-count is reset to zero
- Note, refer vendor documentation for respective valid values
type: int
warning_only:
description: Only give a warning message when limit is exceeded
type: bool
max_metric:
description: Set maximum metric
type: dict
suboptions:
router_lsa:
description: Maximum metric in self-originated router-LSAs
type: bool
required: true
external_lsa:
description:
- Override external-lsa metric with max-metric value
- Overriding metric in external-LSAs
- Note, refer vendor documentation for respective valid values
type: int
include_stub:
description: Set maximum metric for stub links in router-LSAs
type: bool
on_startup:
description: Set maximum metric temporarily after reboot
type: dict
suboptions:
time:
description:
- Time, in seconds, router-LSAs are originated with max-metric
- Note, please refer vendor documentation for respective valid range
type: int
wait_for_bgp:
description: Let BGP decide when to originate router-LSA with normal metric
type: bool
summary_lsa:
description:
- Override summary-lsa metric with max-metric value
- Note, please refer vendor documentation for respective valid range
type: int
passive_interface:
description: Suppress routing updates on an interface
type: str
prefix_suppression:
description: Enable prefix suppression
type: bool
queue_depth:
description: Hello/Router process queue depth
type: dict
suboptions:
hello:
description: OSPF Hello process queue depth
type: dict
suboptions:
max_packets:
description: maximum number of packets in the queue
type: int
unlimited:
description: Unlimited queue depth
type: bool
router_id:
description:
- Router-id address for this OSPF process
- OSPF router-id in IP address format (A.B.C.D)
type: str
shutdown:
description: Shutdown the router process
type: bool
timers:
description: Adjust routing timers
type: dict
suboptions:
lsa:
description:
- OSPF LSA timers, arrival timer
- The minimum interval in milliseconds between accepting the same LSA
- Note, refer vendor documentation for respective valid values
type: int
manet:
description: OSPF MANET timers
type: dict
suboptions:
cache:
description: Specify MANET cache sizes
type: dict
suboptions:
acknowledgement:
description: Specify MANET acknowledgement cache size
type: int
redundancy:
description: Specify MANET LSA cache size
type: int
hello:
description:
- Unicast Hellos rather than multicast
- Unicast Hello requests and responses rather than multicast
type: bool
peering:
description: MANET OSPF Smart Peering
type: dict
suboptions:
set:
description: Enable selective peering
type: bool
per_interface:
description: Select peers per interface rather than per node
type: bool
redundancy:
description:
- Redundant paths
- Number of redundant OSPF paths
type: int
willingness:
description: Specify and Relay willingness value
type: int
pacing:
description: OSPF pacing timers
type: dict
suboptions:
flood:
description:
- OSPF flood pacing timer
- The minimum interval in msec to pace limit flooding on interface
- Note, refer vendor documentation for respective valid values
type: int
lsa_group:
description:
- OSPF LSA group pacing timer
- Interval in sec between group of LSA being refreshed or maxaged
- Note, refer vendor documentation for respective valid values
type: int
retransmission:
description:
- OSPF retransmission pacing timer
- The minimum interval in msec between neighbor retransmissions
- Note, refer vendor documentation for respective valid values
type: int
throttle:
description: OSPF throttle timers
type: dict
suboptions:
lsa:
description: OSPF LSA throttle timers
type: dict
suboptions:
first_delay:
description:
- Delay to generate first occurrence of LSA in milliseconds
- Note, refer vendor documentation for respective valid values
type: int
min_delay:
description:
- Minimum delay between originating the same LSA in milliseconds
- Note, refer vendor documentation for respective valid values
type: int
max_delay:
description:
- Maximum delay between originating the same LSA in milliseconds
- Note, refer vendor documentation for respective valid values
type: int
spf:
description: OSPF SPF throttle timers
type: dict
suboptions:
receive_delay:
description:
- Delay between receiving a change to SPF calculation in milliseconds
- Note, refer vendor documentation for respective valid values
type: int
between_delay:
description:
- Delay between first and second SPF calculation in milliseconds
- Note, refer vendor documentation for respective valid values
type: int
max_delay:
description:
- Maximum wait time in milliseconds for SPF calculations
- Note, refer vendor documentation for respective valid values
type: int
running_config:
description:
- This option is used only with state I(parsed).
- The value of this option should be the output received from the IOS device by
executing the command B(sh running-config | section ^router ospfv3).
- The state I(parsed) reads the configuration from C(running_config) option and
transforms it into Ansible structured data as per the resource module's argspec
and the value is then returned in the I(parsed) key within the result.
type: str
state:
description:
- The state the configuration should be left in
- The states I(rendered), I(gathered) and I(parsed) does not perform any change
on the device.
- The state I(rendered) will transform the configuration in C(config) option to
platform specific CLI commands which will be returned in the I(rendered) key
within the result. For state I(rendered) active connection to remote host is
not required.
- The state I(gathered) will fetch the running configuration from device and transform
it into structured data in the format as per the resource module argspec and
the value is returned in the I(gathered) key within the result.
- The state I(parsed) reads the configuration from C(running_config) option and
transforms it into JSON format as per the resource module parameters and the
value is returned in the I(parsed) key within the result. The value of C(running_config)
option should be the same format as the output of command I(show running-config
| section ^router ospfv3) executed on device. For state I(parsed) active
connection to remote host is not required.
type: str
choices:
- merged
- replaced
- overridden
- deleted
- gathered
- parsed
- rendered
default: merged
"""
EXAMPLES = """
# Using deleted
# Before state:
# -------------
#
# router-ios#sh running-config | section ^router ospfv3
# router ospfv3 1
# max-metric router-lsa on-startup 110
# area 10 nssa default-information-originate metric 10
# !
# address-family ipv4 unicast vrf blue
# adjacency stagger 50 50
# area 25 nssa default-information-originate metric 25 nssa-only
# exit-address-family
# router ospfv3 200
# max-metric router-lsa on-startup 100
# auto-cost reference-bandwidth 4
# !
# address-family ipv4 unicast
# adjacency stagger 200 200
# exit-address-family
- name: Delete provided OSPF V3 processes
cisco.ios.ios_ospfv3:
config:
processes:
- process_id: 1
state: deleted
# Commands Fired:
# ---------------
#
# "commands": [
# "no router ospfv3 1"
# ]
# After state:
# -------------
# router-ios#sh running-config | section ^router ospfv3
# router ospfv3 200
# max-metric router-lsa on-startup 100
# auto-cost reference-bandwidth 4
# !
# address-family ipv4 unicast
# adjacency stagger 200 200
# exit-address-family
# Using deleted without any config passed (NOTE: This will delete all OSPFV3 configuration from device)
# Before state:
# -------------
#
# router-ios#sh running-config | section ^router ospfv3
# router ospfv3 1
# max-metric router-lsa on-startup 110
# area 10 nssa default-information-originate metric 10
# !
# address-family ipv4 unicast vrf blue
# adjacency stagger 50 50
# area 25 nssa default-information-originate metric 25 nssa-only
# exit-address-family
# router ospfv3 200
# max-metric router-lsa on-startup 100
# auto-cost reference-bandwidth 4
# !
# address-family ipv4 unicast
# adjacency stagger 200 200
# exit-address-family
- name: Delete all OSPF processes
cisco.ios.ios_ospfv3:
state: deleted
# Commands Fired:
# ---------------
#
# "commands": [
# "no router ospfv3 200",
# "no router ospfv3 1"
# ]
# After state:
# -------------
# router-ios#sh running-config | section ^router ospfv3
# router-ios#
# Using merged
# Before state:
# -------------
#
# router-ios#sh running-config | section ^router ospfv3
# router-ios#
- name: Merge provided OSPFV3 configuration
cisco.ios.ios_ospfv3:
config:
processes:
- process_id: 1
max_metric:
router_lsa: true
on_startup:
time: 110
address_family:
- afi: ipv4
unicast: true
vrf: blue
adjacency:
min_adjacency: 50
max_adjacency: 50
areas:
- area_id: 25
nssa:
default_information_originate:
metric: 25
nssa_only: true
areas:
- area_id: "10"
nssa:
default_information_originate:
metric: 10
timers:
throttle:
lsa:
first_delay: 12
min_delay: 14
max_delay: 16
- process_id: 200
address_family:
- afi: ipv4
unicast: true
adjacency:
min_adjacency: 200
max_adjacency: 200
max_metric:
router_lsa: true
on_startup:
time: 100
auto_cost:
reference_bandwidth: 4
state: merged
# Commands Fired:
# ---------------
#
# "commands": [
# "router ospfv3 1",
# "max-metric router-lsa on-startup 110",
# "area 10 nssa default-information-originate metric 10",
# "address-family ipv4 unicast vrf blue",
# "adjacency stagger 50 50",
# "area 25 nssa default-information-originate metric 25 nssa-only",
# "exit-address-family",
# "router ospfv3 200",
# "auto-cost reference-bandwidth 4",
# "max-metric router-lsa on-startup 100",
# "address-family ipv4 unicast",
# "adjacency stagger 200 200",
# "exit-address-family"
# ]
# After state:
# -------------
#
# router-ios#sh running-config | section ^router ospfv3
# router ospfv3 1
# max-metric router-lsa on-startup 110
# area 10 nssa default-information-originate metric 10
# !
# address-family ipv4 unicast vrf blue
# adjacency stagger 50 50
# area 25 nssa default-information-originate metric 25 nssa-only
# exit-address-family
# router ospfv3 200
# max-metric router-lsa on-startup 100
# auto-cost reference-bandwidth 4
# !
# address-family ipv4 unicast
# adjacency stagger 200 200
# exit-address-family
# Using overridden
# Before state:
# -------------
#
# router ospfv3 1
# max-metric router-lsa on-startup 110
# area 10 nssa default-information-originate metric 10
# !
# address-family ipv4 unicast vrf blue
# adjacency stagger 50 50
# area 25 nssa default-information-originate metric 25 nssa-only
# exit-address-family
# router ospfv3 200
# max-metric router-lsa on-startup 100
# auto-cost reference-bandwidth 4
# !
# address-family ipv4 unicast
# adjacency stagger 200 200
# exit-address-family
- name: Override provided OSPFV3 configuration
cisco.ios.ios_ospfv3:
config:
processes:
- process_id: 200
max_metric:
router_lsa: true
on_startup:
time: 200
address_family:
- afi: ipv4
unicast: true
adjacency:
min_adjacency: 50
max_adjacency: 50
areas:
- area_id: 200
nssa:
default_information_originate:
metric: 200
nssa_only: true
areas:
- area_id: "10"
nssa:
default_information_originate:
metric: 10
state: overridden
# Commands Fired:
# ---------------
#
# "commands": [
# "no router ospfv3 1",
# "router ospfv3 200",
# "no auto-cost reference-bandwidth 4",
# "max-metric router-lsa on-startup 200",
# "area 10 nssa default-information-originate metric 10",
# "address-family ipv4 unicast",
# "adjacency stagger 50 50",
# "area 200 nssa default-information-originate metric 200 nssa-only",
# "exit-address-family"
# ]
# After state:
# -------------
#
# router-ios#sh running-config | section ^router ospfv3
# router ospfv3 200
# max-metric router-lsa on-startup 200
# area 10 nssa default-information-originate metric 10
# !
# address-family ipv4 unicast
# adjacency stagger 50 50
# area 200 nssa default-information-originate metric 200 nssa-only
# exit-address-family
# Using replaced
# Before state:
# -------------
#
# router-ios#sh running-config | section ^router ospfv3
# router ospfv3 1
# max-metric router-lsa on-startup 110
# area 10 nssa default-information-originate metric 10
# !
# address-family ipv4 unicast vrf blue
# adjacency stagger 50 50
# area 25 nssa default-information-originate metric 25 nssa-only
# exit-address-family
# router ospfv3 200
# max-metric router-lsa on-startup 100
# auto-cost reference-bandwidth 4
# !
# address-family ipv4 unicast
# adjacency stagger 200 200
# exit-address-family
- name: Replaced provided OSPFV3 configuration
cisco.ios.ios_ospfv3:
config:
processes:
- process_id: 200
max_metric:
router_lsa: true
on_startup:
time: 200
address_family:
- afi: ipv4
unicast: true
adjacency:
min_adjacency: 50
max_adjacency: 50
areas:
- area_id: 200
nssa:
default_information_originate:
metric: 200
nssa_only: true
areas:
- area_id: "10"
nssa:
default_information_originate:
metric: 10
state: replaced
# Commands Fired:
# ---------------
# "commands": [
# "router ospfv3 200",
# "no auto-cost reference-bandwidth 4",
# "max-metric router-lsa on-startup 200",
# "area 10 nssa default-information-originate metric 10",
# "address-family ipv4 unicast",
# "adjacency stagger 50 50",
# "area 200 nssa default-information-originate metric 200 nssa-only",
# "exit-address-family"
# ]
# After state:
# -------------
# router-ios#sh running-config | section ^router ospfv3
# router ospfv3 1
# max-metric router-lsa on-startup 110
# area 10 nssa default-information-originate metric 10
# !
# address-family ipv4 unicast vrf blue
# adjacency stagger 50 50
# area 25 nssa default-information-originate metric 25 nssa-only
# exit-address-family
# router ospfv3 200
# max-metric router-lsa on-startup 200
# area 10 nssa default-information-originate metric 10
# !
# address-family ipv4 unicast
# adjacency stagger 50 50
# area 200 nssa default-information-originate metric 200 nssa-only
# exit-address-family
# Using Gathered
# Before state:
# -------------
#
# router-ios#sh running-config | section ^router ospfv3
# router ospfv3 1
# max-metric router-lsa on-startup 110
# area 10 nssa default-information-originate metric 10
# !
# address-family ipv4 unicast vrf blue
# adjacency stagger 50 50
# area 25 nssa default-information-originate metric 25 nssa-only
# exit-address-family
# router ospfv3 200
# max-metric router-lsa on-startup 100
# auto-cost reference-bandwidth 4
# !
# address-family ipv4 unicast
# adjacency stagger 200 200
# exit-address-family
- name: Gather OSPFV3 provided configurations
cisco.ios.ios_ospfv3:
config:
state: gathered
# Module Execution Result:
# ------------------------
#
# "gathered": {
# "processes": [
# {
# "address_family": [
# {
# "adjacency": {
# "max_adjacency": 50,
# "min_adjacency": 50
# },
# "afi": "ipv4",
# "areas": [
# {
# "area_id": "25",
# "nssa": {
# "default_information_originate": {
# "metric": 25,
# "nssa_only": true
# }
# }
# }
# ],
# "unicast": true,
# "vrf": "blue"
# }
# ],
# "areas": [
# {
# "area_id": "10",
# "nssa": {
# "default_information_originate": {
# "metric": 10
# }
# }
# }
# ],
# "max_metric": {
# "on_startup": {
# "time": 110
# },
# "router_lsa": true
# },
# "process_id": 1
# },
# {
# "address_family": [
# {
# "adjacency": {
# "max_adjacency": 200,
# "min_adjacency": 200
# },
# "afi": "ipv4",
# "unicast": true
# }
# ],
# "auto_cost": {
# "reference_bandwidth": 4
# },
# "max_metric": {
# "on_startup": {
# "time": 100
# },
# "router_lsa": true
# },
# "process_id": 200
# }
# ]
# }
# After state:
# ------------
#
# router-ios#sh running-config | section ^router ospfv3
# router ospfv3 1
# max-metric router-lsa on-startup 110
# area 10 nssa default-information-originate metric 10
# !
# address-family ipv4 unicast vrf blue
# adjacency stagger 50 50
# area 25 nssa default-information-originate metric 25 nssa-only
# exit-address-family
# router ospfv3 200
# max-metric router-lsa on-startup 100
# auto-cost reference-bandwidth 4
# !
# address-family ipv4 unicast
# adjacency stagger 200 200
# exit-address-family
# Using Rendered
- name: Render the commands for provided configuration
cisco.ios.ios_ospfv3:
config:
processes:
- process_id: 1
max_metric:
router_lsa: true
on_startup:
time: 110
address_family:
- afi: ipv4
unicast: true
vrf: blue
adjacency:
min_adjacency: 50
max_adjacency: 50
areas:
- area_id: 25
nssa:
default_information_originate:
metric: 25
nssa_only: true
areas:
- area_id: "10"
nssa:
default_information_originate:
metric: 10
timers:
throttle:
lsa:
first_delay: 12
min_delay: 14
max_delay: 16
- process_id: 200
address_family:
- afi: ipv4
unicast: true
adjacency:
min_adjacency: 200
max_adjacency: 200
max_metric:
router_lsa: true
on_startup:
time: 100
auto_cost:
reference_bandwidth: 4
state: rendered
# Module Execution Result:
# ------------------------
#
# "rendered": [
# "router ospfv3 1",
# "max-metric router-lsa on-startup 110",
# "area 10 nssa default-information-originate metric 10",
# "address-family ipv4 unicast vrf blue",
# "adjacency stagger 50 50",
# "area 25 nssa default-information-originate metric 25 nssa-only",
# "exit-address-family",
# "router ospfv3 200",
# "auto-cost reference-bandwidth 4",
# "max-metric router-lsa on-startup 100",
# "address-family ipv4 unicast",
# "adjacency stagger 200 200",
# "exit-address-family"
# ]
# Using Parsed
# File: parsed.cfg
# ----------------
#
# router ospfv3 1
# max-metric router-lsa on-startup 110
# area 10 nssa default-information-originate metric 10
# !
# address-family ipv4 unicast vrf blue
# adjacency stagger 50 50
# area 25 nssa default-information-originate metric 25 nssa-only
# exit-address-family
# router ospfv3 200
# max-metric router-lsa on-startup 100
# auto-cost reference-bandwidth 4
# !
# address-family ipv4 unicast
# adjacency stagger 200 200
# exit-address-family
- name: Parse the provided configuration with the existing running configuration
cisco.ios.ios_ospfv3:
running_config: "{{ lookup('file', 'parsed.cfg') }}"
state: parsed
# Module Execution Result:
# ------------------------
#
# "parsed": {
# "processes": [
# {
# "address_family": [
# {
# "adjacency": {
# "max_adjacency": 50,
# "min_adjacency": 50
# },
# "afi": "ipv4",
# "areas": [
# {
# "area_id": "25",
# "nssa": {
# "default_information_originate": {
# "metric": 25,
# "nssa_only": true
# }
# }
# }
# ],
# "unicast": true,
# "vrf": "blue"
# }
# ],
# "areas": [
# {
# "area_id": "10",
# "nssa": {
# "default_information_originate": {
# "metric": 10
# }
# }
# }
# ],
# "max_metric": {
# "on_startup": {
# "time": 110
# },
# "router_lsa": true
# },
# "process_id": 1
# }
# ]
# }
"""
RETURN = """
before:
description: The configuration prior to the model invocation.
returned: always
sample: >
The configuration returned will always be in the same format
of the parameters above.
type: dict
after:
description: The resulting configuration model invocation.
returned: when changed
sample: >
The configuration returned will always be in the same format
of the parameters above.
type: dict
commands:
description: The set of commands pushed to the remote device.
returned: always
type: list
sample: ['router ospfv3 1', 'address-family ipv4 unicast vrf blue', 'adjacency stagger 50 50']
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.cisco.ios.plugins.module_utils.network.ios.argspec.ospfv3.ospfv3 import (
Ospfv3Args,
)
from ansible_collections.cisco.ios.plugins.module_utils.network.ios.config.ospfv3.ospfv3 import (
Ospfv3,
)
def main():
    """Module entry point.

    Builds the AnsibleModule from the OSPFv3 argspec, enforces the
    state/option constraints, runs the resource-module engine, and
    reports the result via ``exit_json``.
    """
    module = AnsibleModule(
        argument_spec=Ospfv3Args.argument_spec,
        # config is mandatory for every state that renders or pushes changes;
        # parsed instead requires the raw device output in running_config.
        required_if=[
            ("state", "merged", ("config",)),
            ("state", "replaced", ("config",)),
            ("state", "overridden", ("config",)),
            ("state", "rendered", ("config",)),
            ("state", "parsed", ("running_config",)),
        ],
        mutually_exclusive=[("config", "running_config")],
        supports_check_mode=True,
    )
    module.exit_json(**Ospfv3(module).execute_module())


if __name__ == "__main__":
    main()
| 37.573596
| 110
| 0.474092
| 6,252
| 74,283
| 5.582214
| 0.091171
| 0.024069
| 0.039713
| 0.039456
| 0.821375
| 0.798138
| 0.786132
| 0.773668
| 0.763095
| 0.755444
| 0
| 0.019276
| 0.474806
| 74,283
| 1,976
| 111
| 37.592611
| 0.875298
| 0.00805
| 0
| 0.850344
| 0
| 0.004231
| 0.98679
| 0.023564
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000529
| false
| 0.001586
| 0.002115
| 0
| 0.002644
| 0.000529
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
44e44b3b723cf9ca006735d5459aa6f13a625866
| 18,264
|
py
|
Python
|
clayful/models/order.py
|
Clayful/clayful-python
|
ddd5f1f986fb0079d5128e17f4b0fdce83b4cec1
|
[
"MIT"
] | null | null | null |
clayful/models/order.py
|
Clayful/clayful-python
|
ddd5f1f986fb0079d5128e17f4b0fdce83b4cec1
|
[
"MIT"
] | 3
|
2020-04-17T05:24:06.000Z
|
2022-02-10T09:00:22.000Z
|
clayful/models/order.py
|
Clayful/clayful-python
|
ddd5f1f986fb0079d5128e17f4b0fdce83b4cec1
|
[
"MIT"
] | null | null | null |
class Order:
Clayful = None
name = 'Order'
path = 'orders'
@staticmethod
def config(clayful):
Order.Clayful = clayful
return Order
@staticmethod
def accept_refund(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'accept_refund',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/refunds/{refundId}/accepted',
'params': ('orderId', 'refundId', ),
'without_payload': True,
'args': args
})
@staticmethod
def authenticate(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'authenticate',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/auth',
'params': ('orderId', ),
'args': args
})
@staticmethod
def cancel(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'cancel',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/cancellation',
'params': ('orderId', ),
'args': args
})
@staticmethod
def cancel_for_me(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'cancel_for_me',
'http_method': 'POST',
'path': '/v1/me/orders/{orderId}/cancellation',
'params': ('orderId', ),
'args': args
})
@staticmethod
def cancel_refund(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'cancel_refund',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/refunds/{refundId}/cancellation',
'params': ('orderId', 'refundId', ),
'args': args
})
@staticmethod
def cancel_refund_for_me(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'cancel_refund_for_me',
'http_method': 'POST',
'path': '/v1/me/orders/{orderId}/refunds/{refundId}/cancellation',
'params': ('orderId', 'refundId', ),
'args': args
})
@staticmethod
def check_ticket(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'check_ticket',
'http_method': 'POST',
'path': '/v1/orders/tickets/{code}/validity',
'params': ('code', ),
'args': args
})
@staticmethod
def count(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'count',
'http_method': 'GET',
'path': '/v1/orders/count',
'params': (),
'args': args
})
@staticmethod
def count_for_me(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'count_for_me',
'http_method': 'GET',
'path': '/v1/me/orders/count',
'params': (),
'args': args
})
@staticmethod
def create_download_url(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'create_download_url',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/items/{itemId}/download/url',
'params': ('orderId', 'itemId', ),
'without_payload': True,
'args': args
})
@staticmethod
def create_download_url_for_me(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'create_download_url_for_me',
'http_method': 'POST',
'path': '/v1/me/orders/{orderId}/items/{itemId}/download/url',
'params': ('orderId', 'itemId', ),
'without_payload': True,
'args': args
})
@staticmethod
def create_fulfillment(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'create_fulfillment',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/fulfillments',
'params': ('orderId', ),
'args': args
})
@staticmethod
def delete(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'delete',
'http_method': 'DELETE',
'path': '/v1/orders/{orderId}',
'params': ('orderId', ),
'args': args
})
@staticmethod
def delete_fulfillment(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'delete_fulfillment',
'http_method': 'DELETE',
'path': '/v1/orders/{orderId}/fulfillments/{fulfillmentId}',
'params': ('orderId', 'fulfillmentId', ),
'args': args
})
@staticmethod
def delete_inventory_operation(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'delete_inventory_operation',
'http_method': 'DELETE',
'path': '/v1/orders/{orderId}/inventory/operations/{operationId}',
'params': ('orderId', 'operationId', ),
'args': args
})
@staticmethod
def delete_metafield(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'delete_metafield',
'http_method': 'DELETE',
'path': '/v1/orders/{orderId}/meta/{field}',
'params': ('orderId', 'field', ),
'args': args
})
@staticmethod
def delete_refund(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'delete_refund',
'http_method': 'DELETE',
'path': '/v1/orders/{orderId}/refunds/{refundId}',
'params': ('orderId', 'refundId', ),
'args': args
})
@staticmethod
def get(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'get',
'http_method': 'GET',
'path': '/v1/orders/{orderId}',
'params': ('orderId', ),
'args': args
})
@staticmethod
def get_for_me(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'get_for_me',
'http_method': 'GET',
'path': '/v1/me/orders/{orderId}',
'params': ('orderId', ),
'args': args
})
@staticmethod
def increase_metafield(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'increase_metafield',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/meta/{field}/inc',
'params': ('orderId', 'field', ),
'args': args
})
@staticmethod
def list(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'list',
'http_method': 'GET',
'path': '/v1/orders',
'params': (),
'args': args
})
@staticmethod
def list_by_subscription(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'list_by_subscription',
'http_method': 'GET',
'path': '/v1/subscriptions/{subscriptionId}/orders',
'params': ('subscriptionId', ),
'args': args
})
@staticmethod
def list_by_subscription_for_me(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'list_by_subscription_for_me',
'http_method': 'GET',
'path': '/v1/me/subscriptions/{subscriptionId}/orders',
'params': ('subscriptionId', ),
'args': args
})
@staticmethod
def list_for_me(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'list_for_me',
'http_method': 'GET',
'path': '/v1/me/orders',
'params': (),
'args': args
})
@staticmethod
def list_inventory_operations(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'list_inventory_operations',
'http_method': 'GET',
'path': '/v1/orders/{orderId}/inventory/operations',
'params': ('orderId', ),
'args': args
})
@staticmethod
def mark_as_done(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'mark_as_done',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/done',
'params': ('orderId', ),
'without_payload': True,
'args': args
})
@staticmethod
def mark_as_not_received(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'mark_as_not_received',
'http_method': 'DELETE',
'path': '/v1/orders/{orderId}/received',
'params': ('orderId', ),
'args': args
})
@staticmethod
def mark_as_not_received_for_me(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'mark_as_not_received_for_me',
'http_method': 'DELETE',
'path': '/v1/me/orders/{orderId}/received',
'params': ('orderId', ),
'args': args
})
@staticmethod
def mark_as_received(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'mark_as_received',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/received',
'params': ('orderId', ),
'without_payload': True,
'args': args
})
@staticmethod
def mark_as_received_for_me(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'mark_as_received_for_me',
'http_method': 'POST',
'path': '/v1/me/orders/{orderId}/received',
'params': ('orderId', ),
'without_payload': True,
'args': args
})
@staticmethod
def mark_as_undone(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'mark_as_undone',
'http_method': 'DELETE',
'path': '/v1/orders/{orderId}/done',
'params': ('orderId', ),
'args': args
})
@staticmethod
def pull_from_metafield(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'pull_from_metafield',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/meta/{field}/pull',
'params': ('orderId', 'field', ),
'args': args
})
@staticmethod
def push_to_metafield(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'push_to_metafield',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/meta/{field}/push',
'params': ('orderId', 'field', ),
'args': args
})
@staticmethod
def register_payment_method(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'register_payment_method',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/transactions/payments/methods',
'params': ('orderId', ),
'args': args
})
@staticmethod
def request_refund(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'request_refund',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/refunds',
'params': ('orderId', ),
'args': args
})
@staticmethod
def request_refund_for_me(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'request_refund_for_me',
'http_method': 'POST',
'path': '/v1/me/orders/{orderId}/refunds',
'params': ('orderId', ),
'args': args
})
@staticmethod
def restock_all_refund_items(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'restock_all_refund_items',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/refunds/{refundId}/restock/all',
'params': ('orderId', 'refundId', ),
'without_payload': True,
'args': args
})
@staticmethod
def restock_refund_items(*args):
return Order.Clayful.call_api({
'model_name': Order.name,
'method_name': 'restock_refund_items',
'http_method': 'POST',
'path': '/v1/orders/{orderId}/refunds/{refundId}/restock',
'params': ('orderId', 'refundId', ),
'args': args
})
@staticmethod
def sync_inventory(*args):
    """POST /v1/orders/{orderId}/synced — sync the order with product inventory (no request body)."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='sync_inventory',
        http_method='POST',
        path='/v1/orders/{orderId}/synced',
        params=('orderId',),
        without_payload=True,
        args=args,
    ))
@staticmethod
def unaccept_refund(*args):
    """DELETE /v1/orders/{orderId}/refunds/{refundId}/accepted — revert a refund acceptance."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='unaccept_refund',
        http_method='DELETE',
        path='/v1/orders/{orderId}/refunds/{refundId}/accepted',
        params=('orderId', 'refundId'),
        args=args,
    ))
@staticmethod
def unregister_payment_method(*args):
    """DELETE /v1/orders/{orderId}/transactions/payments/methods/{paymentMethodId} — unregister a payment method."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='unregister_payment_method',
        http_method='DELETE',
        path='/v1/orders/{orderId}/transactions/payments/methods/{paymentMethodId}',
        params=('orderId', 'paymentMethodId'),
        args=args,
    ))
@staticmethod
def update(*args):
    """PUT /v1/orders/{orderId} — update an order."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='update',
        http_method='PUT',
        path='/v1/orders/{orderId}',
        params=('orderId',),
        args=args,
    ))
@staticmethod
def update_cancellation(*args):
    """PUT /v1/orders/{orderId}/cancellation — update an order's cancellation record."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='update_cancellation',
        http_method='PUT',
        path='/v1/orders/{orderId}/cancellation',
        params=('orderId',),
        args=args,
    ))
@staticmethod
def update_cancellation_for_me(*args):
    """PUT /v1/me/orders/{orderId}/cancellation — update cancellation on the authenticated customer's order."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='update_cancellation_for_me',
        http_method='PUT',
        path='/v1/me/orders/{orderId}/cancellation',
        params=('orderId',),
        args=args,
    ))
@staticmethod
def update_for_me(*args):
    """PUT /v1/me/orders/{orderId} — update the authenticated customer's order."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='update_for_me',
        http_method='PUT',
        path='/v1/me/orders/{orderId}',
        params=('orderId',),
        args=args,
    ))
@staticmethod
def update_fulfillment(*args):
    """PUT /v1/orders/{orderId}/fulfillments/{fulfillmentId} — update a fulfillment."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='update_fulfillment',
        http_method='PUT',
        path='/v1/orders/{orderId}/fulfillments/{fulfillmentId}',
        params=('orderId', 'fulfillmentId'),
        args=args,
    ))
@staticmethod
def update_item(*args):
    """PUT /v1/orders/{orderId}/items/{itemId} — update an order item."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='update_item',
        http_method='PUT',
        path='/v1/orders/{orderId}/items/{itemId}',
        params=('orderId', 'itemId'),
        args=args,
    ))
@staticmethod
def update_refund(*args):
    """PUT /v1/orders/{orderId}/refunds/{refundId} — update a refund."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='update_refund',
        http_method='PUT',
        path='/v1/orders/{orderId}/refunds/{refundId}',
        params=('orderId', 'refundId'),
        args=args,
    ))
@staticmethod
def update_refund_cancellation(*args):
    """PUT /v1/orders/{orderId}/refunds/{refundId}/cancellation — update a refund's cancellation record."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='update_refund_cancellation',
        http_method='PUT',
        path='/v1/orders/{orderId}/refunds/{refundId}/cancellation',
        params=('orderId', 'refundId'),
        args=args,
    ))
@staticmethod
def update_refund_cancellation_for_me(*args):
    """PUT /v1/me/orders/{orderId}/refunds/{refundId}/cancellation — customer-scoped refund-cancellation update."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='update_refund_cancellation_for_me',
        http_method='PUT',
        path='/v1/me/orders/{orderId}/refunds/{refundId}/cancellation',
        params=('orderId', 'refundId'),
        args=args,
    ))
@staticmethod
def update_refund_for_me(*args):
    """PUT /v1/me/orders/{orderId}/refunds/{refundId} — update a refund on the authenticated customer's order."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='update_refund_for_me',
        http_method='PUT',
        path='/v1/me/orders/{orderId}/refunds/{refundId}',
        params=('orderId', 'refundId'),
        args=args,
    ))
@staticmethod
def update_transactions(*args):
    """PUT /v1/orders/{orderId}/transactions — update an order's transactions."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='update_transactions',
        http_method='PUT',
        path='/v1/orders/{orderId}/transactions',
        params=('orderId',),
        args=args,
    ))
@staticmethod
def update_transactions_for_me(*args):
    """PUT /v1/me/orders/{orderId}/transactions — customer-scoped transactions update (no request body)."""
    # NOTE(review): `without_payload=True` on a PUT differs from the sibling
    # `update_transactions`, which sends a body — confirm against the API spec.
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='update_transactions_for_me',
        http_method='PUT',
        path='/v1/me/orders/{orderId}/transactions',
        params=('orderId',),
        without_payload=True,
        args=args,
    ))
@staticmethod
def use_ticket(*args):
    """POST /v1/orders/tickets/{code}/used — mark an order ticket as used (no request body)."""
    return Order.Clayful.call_api(dict(
        model_name=Order.name,
        method_name='use_ticket',
        http_method='POST',
        path='/v1/orders/tickets/{code}/used',
        params=('code',),
        without_payload=True,
        args=args,
    ))
| 27.178571
| 94
| 0.548675
| 1,817
| 18,264
| 5.286186
| 0.051183
| 0.069964
| 0.084331
| 0.123686
| 0.935867
| 0.911921
| 0.892868
| 0.769183
| 0.754086
| 0.709735
| 0
| 0.004147
| 0.286958
| 18,264
| 671
| 95
| 27.219076
| 0.733395
| 0
| 0
| 0.738351
| 0
| 0
| 0.342879
| 0.120243
| 0
| 0
| 0
| 0
| 0
| 1
| 0.098566
| false
| 0
| 0
| 0.096774
| 0.204301
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7834b6fddd81469412feac1fbb3b75d1909a5de7
| 224
|
py
|
Python
|
confu/tools/toolchains/__init__.py
|
tiny-dnn/confu
|
8f74d9fc0c04efe8cd1b92ae5f43a5d9b686500e
|
[
"MIT"
] | null | null | null |
confu/tools/toolchains/__init__.py
|
tiny-dnn/confu
|
8f74d9fc0c04efe8cd1b92ae5f43a5d9b686500e
|
[
"MIT"
] | null | null | null |
confu/tools/toolchains/__init__.py
|
tiny-dnn/confu
|
8f74d9fc0c04efe8cd1b92ae5f43a5d9b686500e
|
[
"MIT"
] | 1
|
2020-11-16T18:06:25.000Z
|
2020-11-16T18:06:25.000Z
|
from confu.tools.toolchains.base import Toolchain
from confu.tools.toolchains.unix import UnixToolchain
from confu.tools.toolchains.nacl import NaClToolchain
from confu.tools.toolchains.emscripten import EmscriptenToolchain
| 44.8
| 65
| 0.875
| 28
| 224
| 7
| 0.464286
| 0.183673
| 0.285714
| 0.489796
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 224
| 4
| 66
| 56
| 0.942308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7862aa390f87b94b30627ed8e16cee4d3f02b084
| 169
|
py
|
Python
|
qutipy/misc/__init__.py
|
sumeetkhatri/QuTIPy
|
ca2a3344c1caa818504425496ea37278d80b1c44
|
[
"Apache-2.0"
] | 19
|
2020-11-11T13:00:22.000Z
|
2022-03-14T11:18:04.000Z
|
qutipy/misc/__init__.py
|
sumeetkhatri/QuTIPy
|
ca2a3344c1caa818504425496ea37278d80b1c44
|
[
"Apache-2.0"
] | null | null | null |
qutipy/misc/__init__.py
|
sumeetkhatri/QuTIPy
|
ca2a3344c1caa818504425496ea37278d80b1c44
|
[
"Apache-2.0"
] | 1
|
2022-03-03T15:20:15.000Z
|
2022-03-03T15:20:15.000Z
|
from qutipy.misc.base_number_to_int import base_number_to_int
from qutipy.misc.cvxpy_to_numpy import cvxpy_to_numpy
from qutipy.misc.numpy_to_cvxpy import numpy_to_cvxpy
| 56.333333
| 61
| 0.899408
| 32
| 169
| 4.3125
| 0.3125
| 0.217391
| 0.304348
| 0.217391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065089
| 169
| 3
| 62
| 56.333333
| 0.873418
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7883155b6611d7eac76ec25c2e1f69a3300e7e30
| 22,523
|
py
|
Python
|
tests/circuit_graph/test_circuit_graph_hash.py
|
Jaybsoni/pennylane
|
3871332dd962fb4f62bf4989d109bcb9f2128d7b
|
[
"Apache-2.0"
] | 2
|
2019-09-02T00:28:31.000Z
|
2021-07-16T09:58:05.000Z
|
tests/circuit_graph/test_circuit_graph_hash.py
|
Jaybsoni/pennylane
|
3871332dd962fb4f62bf4989d109bcb9f2128d7b
|
[
"Apache-2.0"
] | null | null | null |
tests/circuit_graph/test_circuit_graph_hash.py
|
Jaybsoni/pennylane
|
3871332dd962fb4f62bf4989d109bcb9f2128d7b
|
[
"Apache-2.0"
] | 1
|
2019-09-02T00:29:26.000Z
|
2019-09-02T00:29:26.000Z
|
# Copyright 2018-2020 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit and integration tests for creating the :mod:`pennylane` :attr:`QNode.qtape.graph.hash` attribute.
"""
import pytest
import numpy as np
import pennylane as qml
from pennylane.operation import Tensor
from pennylane.circuit_graph import CircuitGraph
from pennylane.wires import Wires
class TestCircuitGraphHash:
    """Test the creation of a hash on a CircuitGraph"""

    # Each case: (operation queue, observable queue, exact expected serialization).
    # The expected strings encode op name, parameters and wires as "Name!param![wires]",
    # with "|||" separating operations from observables.
    numeric_queues = [
        ([qml.RX(0.3, wires=[0])], [], "RX!0.3![0]|||"),
        (
            [
                qml.RX(0.3, wires=[0]),
                qml.RX(0.4, wires=[1]),
                qml.RX(0.5, wires=[2]),
            ],
            [],
            "RX!0.3![0]RX!0.4![1]RX!0.5![2]|||",
        ),
    ]

    @pytest.mark.parametrize("queue, observable_queue, expected_string", numeric_queues)
    def test_serialize_numeric_arguments(self, queue, observable_queue, expected_string):
        """Tests that the same hash is created for two circuitgraphs that have numeric arguments."""
        circuit_graph_1 = CircuitGraph(queue, observable_queue, Wires([0, 1, 2]))
        circuit_graph_2 = CircuitGraph(queue, observable_queue, Wires([0, 1, 2]))
        # Serialization must be deterministic and match the expected literal exactly.
        assert circuit_graph_1.serialize() == circuit_graph_2.serialize()
        assert expected_string == circuit_graph_1.serialize()

    # `not None` evaluates to True — flags each observable as a measured return value.
    observable1 = qml.PauliZ(0)
    observable1.return_type = not None
    observable2 = qml.Hermitian(np.array([[1, 0], [0, -1]]), wires=[0])
    observable2.return_type = not None
    observable3 = Tensor(qml.PauliZ(0) @ qml.PauliZ(1))
    observable3.return_type = not None

    # Observable-only cases: empty operation queue, serialization after "|||".
    numeric_observable_queue = [
        ([], [observable1], "|||PauliZ[0]"),
        ([], [observable2], "|||Hermitian![[ 1 0]\n [ 0 -1]]![0]"),
        ([], [observable3], "|||['PauliZ', 'PauliZ'][0, 1]"),
    ]

    @pytest.mark.parametrize("queue, observable_queue, expected_string", numeric_observable_queue)
    def test_serialize_numeric_arguments_observables(
        self, queue, observable_queue, expected_string
    ):
        """Tests that the same hash is created for two circuitgraphs that have identical queues and empty variable_deps."""
        circuit_graph_1 = CircuitGraph(queue, observable_queue, Wires([0, 1]))
        circuit_graph_2 = CircuitGraph(queue, observable_queue, Wires([0, 1]))
        assert circuit_graph_1.serialize() == circuit_graph_2.serialize()
        assert expected_string == circuit_graph_1.serialize()
class TestQNodeCircuitHashIntegration:
"""Test for the circuit hash that is being created for a QNode during evaluation (inside of _construct)"""
def test_evaluate_circuit_hash_numeric(self):
    """Tests that the circuit hash of identical circuits containing only numeric parameters are equal"""
    dev = qml.device("default.qubit", wires=2)
    a = 0.3
    b = 0.2

    def circuit1():
        qml.RX(a, wires=[0])
        qml.RY(b, wires=[1])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0))

    # construct() builds the tape without executing the circuit
    node1 = qml.QNode(circuit1, dev)
    node1.construct([], {})
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2():
        qml.RX(a, wires=[0])
        qml.RY(b, wires=[1])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0))

    node2 = qml.QNode(circuit2, dev)
    node2.construct([], {})
    circuit_hash_2 = node2.qtape.graph.hash

    # identical structure -> identical graph hash
    assert circuit_hash_1 == circuit_hash_2
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 7), np.linspace(-2 * np.pi, 2 * np.pi, 7) ** 2 / 11),
)
def test_evaluate_circuit_hash_symbolic(self, x, y):
    """Tests that the circuit hash of identical circuits containing only symbolic parameters are equal"""
    dev = qml.device("default.qubit", wires=2)

    def circuit1(x, y):
        qml.RX(x, wires=[0])
        qml.RY(y, wires=[1])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0))

    # executing the QNode populates qtape, from which the graph hash is read
    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.RX(x, wires=[0])
        qml.RY(y, wires=[1])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0))

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    assert circuit_hash_1 == circuit_hash_2
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 7), np.linspace(-2 * np.pi, 2 * np.pi, 7) ** 2 / 11),
)
def test_evaluate_circuit_hash_numeric_and_symbolic(self, x, y):
    """Tests that the circuit hash of identical circuits containing numeric and symbolic parameters are equal"""
    dev = qml.device("default.qubit", wires=3)

    def circuit1(x, y):
        qml.RX(x, wires=[0])
        qml.RY(y, wires=[1])
        qml.RZ(0.3, wires=[2])  # fixed numeric parameter mixed with symbolic x, y
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0))

    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.RX(x, wires=[0])
        qml.RY(y, wires=[1])
        qml.RZ(0.3, wires=[2])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0))

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    assert circuit_hash_1 == circuit_hash_2
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 7), np.linspace(-2 * np.pi, 2 * np.pi, 7) ** 2 / 11),
)
def test_evaluate_circuit_hash_numeric_and_symbolic_tensor_return(self, x, y):
    """Tests that the circuit hashes of identical circuits having a tensor product in the return
    statement are equal"""
    dev = qml.device("default.qubit", wires=3)

    def circuit1(x, y):
        qml.RX(x, wires=[0])
        qml.RY(y, wires=[1])
        qml.RZ(0.3, wires=[2])
        qml.CNOT(wires=[0, 1])
        # Fix: return a tensor-product observable, as the test name and docstring
        # promise. The original returned qml.expval(qml.PauliZ(0)), which merely
        # duplicated test_evaluate_circuit_hash_numeric_and_symbolic.
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.RX(x, wires=[0])
        qml.RY(y, wires=[1])
        qml.RZ(0.3, wires=[2])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    # both circuits are still identical, so their hashes must match
    assert circuit_hash_1 == circuit_hash_2
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 7), np.linspace(-2 * np.pi, 2 * np.pi, 7) ** 2 / 11),
)
def test_evaluate_circuit_hash_same_operation_has_numeric_and_symbolic(self, x, y):
    """Tests that the circuit hashes of identical circuits where one operation has both numeric
    and symbolic arguments are equal"""
    dev = qml.device("default.qubit", wires=3)

    def circuit1(x, y):
        qml.Rot(x, y, 0.3, wires=[0])  # single op mixing symbolic (x, y) and numeric (0.3) args
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.Rot(x, y, 0.3, wires=[0])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    assert circuit_hash_1 == circuit_hash_2
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 7), np.linspace(-2 * np.pi, 2 * np.pi, 7) ** 2 / 11),
)
def test_evaluate_circuit_hash_numeric_and_symbolic_return_type_does_not_matter(self, x, y):
    """Tests that the circuit hashes of identical circuits only differing on their return types are equal"""
    dev = qml.device("default.qubit", wires=3)

    def circuit1(x, y):
        qml.Rot(x, y, 0.3, wires=[0])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.Rot(x, y, 0.3, wires=[0])
        qml.CNOT(wires=[0, 1])
        return qml.var(qml.PauliZ(0) @ qml.PauliX(1))

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    def circuit3(x, y):
        qml.Rot(x, y, 0.3, wires=[0])
        qml.CNOT(wires=[0, 1])
        return qml.sample(qml.PauliZ(0) @ qml.PauliX(1))

    # Fix: wrap circuit3, not circuit1 — the original left circuit3 unused, so the
    # qml.sample return type was never actually exercised.
    # NOTE(review): on some device configurations qml.sample requires finite shots;
    # confirm this passes, or set shots on `dev` for node3.
    node3 = qml.QNode(circuit3, dev)
    node3(x, y)
    circuit_hash_3 = node3.qtape.graph.hash

    assert circuit_hash_1 == circuit_hash_2 == circuit_hash_3
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 7), np.linspace(-2 * np.pi, 2 * np.pi, 7) ** 2 / 11),
)
def test_evaluate_circuit_hash_hermitian(self, x, y):
    """Tests that the circuit hashes of identical circuits containing a Hermitian observable are equal"""
    dev = qml.device("default.qubit", wires=3)
    matrix = np.array([[1, 0], [0, 1]])  # same matrix object shared by both circuits

    def circuit1(x, y):
        qml.Rot(x, y, 0.3, wires=[0])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.Hermitian(matrix, wires=[0]) @ qml.PauliX(1))

    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.Rot(x, y, 0.3, wires=[0])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.Hermitian(matrix, wires=[0]) @ qml.PauliX(1))

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    assert circuit_hash_1 == circuit_hash_2
class TestQNodeCircuitHashDifferentHashIntegration:
"""Tests for checking that different circuit graph hashes are being created for different circuits in a QNode during evaluation (inside of _construct)"""
def test_evaluate_circuit_hash_numeric_different(self):
    """Tests that the circuit hashes of identical circuits except for one numeric value are different"""
    dev = qml.device("default.qubit", wires=2)
    a = 0.3
    b = 0.2

    def circuit1():
        qml.RX(a, wires=[0])
        qml.RY(b, wires=[1])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node1 = qml.QNode(circuit1, dev)
    node1.construct([], {})
    circuit_hash_1 = node1.qtape.graph.hash

    c = 0.6  # the single differing numeric parameter

    def circuit2():
        qml.RX(c, wires=[0])
        qml.RY(b, wires=[1])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node2 = qml.QNode(circuit2, dev)
    node2.construct([], {})
    circuit_hash_2 = node2.qtape.graph.hash

    assert circuit_hash_1 != circuit_hash_2
def test_evaluate_circuit_hash_numeric_different_operation(self):
    """Tests that the circuit hashes of identical circuits except for one of the operations are different"""
    dev = qml.device("default.qubit", wires=2)
    a = 0.3

    def circuit1():
        qml.RX(a, wires=[0])  # RX here ...
        return qml.expval(qml.PauliZ(0))

    node1 = qml.QNode(circuit1, dev)
    node1.construct([], {})
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2():
        qml.RY(a, wires=[0])  # ... vs RY here: same parameter, different gate
        return qml.expval(qml.PauliZ(0))

    node2 = qml.QNode(circuit2, dev)
    node2.construct([], {})
    circuit_hash_2 = node2.qtape.graph.hash

    assert circuit_hash_1 != circuit_hash_2
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 7), np.linspace(-2 * np.pi, 2 * np.pi, 7) ** 2 / 11),
)
def test_evaluate_circuit_hash_numeric_and_symbolic_operation_differs(self, x, y):
    """Tests that the circuit hashes of identical circuits that have numeric and symbolic arguments
    except for one of the operations are different"""
    dev = qml.device("default.qubit", wires=3)

    def circuit1(x, y):
        qml.RX(x, wires=[0])
        qml.RZ(y, wires=[1])  # <-------------------------------------- RZ
        qml.RZ(0.3, wires=[2])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.RX(x, wires=[0])
        qml.RY(y, wires=[1])  # <-------------------------------------- RY
        qml.RZ(0.3, wires=[2])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    assert circuit_hash_1 != circuit_hash_2
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 7), np.linspace(-2 * np.pi, 2 * np.pi, 7) ** 2 / 11),
)
def test_evaluate_circuit_hash_different_return_observable_vs_tensor(self, x, y):
    """Tests that the circuit hashes of identical circuits except for the return statement are different"""
    dev = qml.device("default.qubit", wires=3)

    def circuit1(x, y):
        qml.RX(x, wires=[0])
        qml.RY(y, wires=[1])
        qml.RZ(0.3, wires=[2])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0))  # <------------- qml.PauliZ(0)

    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.RX(x, wires=[0])
        qml.RY(y, wires=[1])
        qml.RZ(0.3, wires=[2])
        qml.CNOT(wires=[0, 1])
        return qml.expval(
            qml.PauliZ(0) @ qml.PauliX(1)
        )  # <------------- qml.PauliZ(0) @ qml.PauliX(1)

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    # single observable vs tensor observable -> different hashes
    assert circuit_hash_1 != circuit_hash_2
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 7), np.linspace(-2 * np.pi, 2 * np.pi, 7) ** 2 / 11),
)
def test_evaluate_circuit_hash_same_operation_has_numeric_and_symbolic_different_order(
    self, x, y
):
    """Tests that the circuit hashes of identical circuits except for the order of numeric and symbolic arguments
    in one of the operations are different."""
    dev = qml.device("default.qubit", wires=3)

    def circuit1(x, y):
        qml.Rot(x, 0.3, y, wires=[0])  # <------------- x, 0.3, y
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.Rot(x, y, 0.3, wires=[0])  # <------------- x, y, 0.3
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    assert circuit_hash_1 != circuit_hash_2
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 7), np.linspace(-2 * np.pi, 2 * np.pi, 7) ** 2 / 11),
)
def test_evaluate_circuit_hash_same_operation_has_numeric_and_symbolic_different_argument(
    self, x, y
):
    """Tests that the circuit hashes of identical circuits except for the numeric value
    in one of the operations are different."""
    dev = qml.device("default.qubit", wires=3)

    def circuit1(x, y):
        qml.Rot(x, y, 0.3, wires=[0])  # <------------- 0.3
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.Rot(x, y, 0.5, wires=[0])  # <------------- 0.5
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    assert circuit_hash_1 != circuit_hash_2
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 2), np.linspace(-2 * np.pi, 2 * np.pi, 2) ** 2 / 11),
)
def test_evaluate_circuit_hash_same_operation_has_numeric_and_symbolic_different_wires(
    self, x, y
):
    """Tests that the circuit hashes of identical circuits except for the wires
    in one of the operations are different."""
    dev = qml.device("default.qubit", wires=3)

    def circuit1(x, y):
        qml.Rot(x, y, 0.3, wires=[0])
        qml.CNOT(wires=[0, 1])  # <------ wires = [0, 1]
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.Rot(x, y, 0.3, wires=[0])
        qml.CNOT(wires=[1, 0])  # <------ wires = [1, 0]
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    # CNOT direction matters: control/target swapped -> different circuit
    assert circuit_hash_1 != circuit_hash_2
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 2), np.linspace(-2 * np.pi, 2 * np.pi, 2) ** 2 / 11),
)
def test_evaluate_circuit_hash_same_operation_has_numeric_and_symbolic_different_wires_in_return(
    self, x, y
):
    """Tests that the circuit hashes of identical circuits except for the wires
    in the return statement are different."""
    dev = qml.device("default.qubit", wires=3)

    def circuit1(x, y):
        qml.Rot(x, y, 0.3, wires=[0])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))  # <----- (0) @ (1)

    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.Rot(x, y, 0.3, wires=[0])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(2))  # <----- (0) @ (2)

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    assert circuit_hash_1 != circuit_hash_2
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 7), np.linspace(-2 * np.pi, 2 * np.pi, 7) ** 2 / 11),
)
def test_evaluate_circuit_hash_numeric_and_symbolic_different_parameter(self, x, y):
    """Tests that the circuit hashes of identical circuits except for the numeric argument of a single operation
    in the circuits are different"""
    dev = qml.device("default.qubit", wires=3)

    def circuit1(x, y):
        qml.RX(x, wires=[0])
        qml.RY(y, wires=[1])
        qml.RZ(0.3, wires=[2])  # <------------- 0.3
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.RX(x, wires=[0])
        qml.RY(y, wires=[1])
        qml.RZ(0.5, wires=[2])  # <------------- 0.5
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.PauliZ(0) @ qml.PauliX(1))

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    assert circuit_hash_1 != circuit_hash_2
@pytest.mark.parametrize(
    "x,y",
    zip(np.linspace(-2 * np.pi, 2 * np.pi, 2), np.linspace(-2 * np.pi, 2 * np.pi, 2) ** 2 / 11),
)
def test_evaluate_circuit_hash_hermitian_different_matrices(self, x, y):
    """Tests that the circuit hashes of identical circuits except for the matrix argument of the Hermitian observable
    in the return statement are different."""
    dev = qml.device("default.qubit", wires=3)
    matrix_1 = np.array([[1, 0], [0, 1]])   # identity
    matrix_2 = np.array([[1, 0], [0, -1]])  # Pauli-Z

    def circuit1(x, y):
        qml.Rot(x, y, 0.3, wires=[0])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.Hermitian(matrix_1, wires=[0]) @ qml.PauliX(1))

    node1 = qml.QNode(circuit1, dev)
    node1(x, y)
    circuit_hash_1 = node1.qtape.graph.hash

    def circuit2(x, y):
        qml.Rot(x, y, 0.3, wires=[0])
        qml.CNOT(wires=[0, 1])
        return qml.expval(qml.Hermitian(matrix_2, wires=[0]) @ qml.PauliX(1))

    node2 = qml.QNode(circuit2, dev)
    node2(x, y)
    circuit_hash_2 = node2.qtape.graph.hash

    assert circuit_hash_1 != circuit_hash_2
@pytest.mark.usefixtures("skip_if_no_dask_support")
def test_compiled_program_was_stored(self):
    """Test that QNodes mapped over identical circuits all share a single circuit hash."""
    # NOTE(review): the original docstring mentioned a "QVM device", but the test
    # uses default.qubit — likely copied from a pennylane-forest test; confirm intent.
    dev = qml.device("default.qubit", wires=3)

    # NOTE(review): `params` and `wires` appear unused in the body — presumably
    # qml.map requires this template signature; verify against qml.map's contract.
    def circuit(params, wires):
        qml.Hadamard(0)
        qml.CNOT(wires=[0, 1])

    obs = [qml.PauliZ(0) @ qml.PauliZ(1)]
    obs_list = obs * 6  # six copies of the same observable -> six identical QNodes
    qnodes = qml.map(circuit, obs_list, dev)
    qnodes([], parallel=True)

    hashes = set()
    for qnode in qnodes:
        hashes.add(qnode.qtape.graph.hash)
    # all six tapes are structurally identical, so exactly one distinct hash
    assert len(hashes) == 1
| 35.357928
| 157
| 0.572437
| 3,186
| 22,523
| 3.939736
| 0.070308
| 0.016412
| 0.022307
| 0.016252
| 0.821702
| 0.814452
| 0.804653
| 0.799235
| 0.794057
| 0.780035
| 0
| 0.047445
| 0.281312
| 22,523
| 636
| 158
| 35.413522
| 0.727992
| 0.156595
| 0
| 0.762637
| 0
| 0.002198
| 0.026733
| 0.002982
| 0
| 0
| 0
| 0
| 0.048352
| 1
| 0.123077
| false
| 0
| 0.013187
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
78a9c4b81039214d92bcf1ead15d901162d57a66
| 148
|
py
|
Python
|
pyFT/__init__.py
|
lesibius/pyFT
|
509bb047d629d8de5049df74e02b09fb91610347
|
[
"MIT"
] | null | null | null |
pyFT/__init__.py
|
lesibius/pyFT
|
509bb047d629d8de5049df74e02b09fb91610347
|
[
"MIT"
] | null | null | null |
pyFT/__init__.py
|
lesibius/pyFT
|
509bb047d629d8de5049df74e02b09fb91610347
|
[
"MIT"
] | null | null | null |
from pyFT.Request import *
from pyFT.Result import *
from pyFT.FTQuerySyntax import *
from pyFT.FTError import *
import pyFT.FTHelper as FTHelper
| 18.5
| 32
| 0.790541
| 21
| 148
| 5.571429
| 0.428571
| 0.273504
| 0.358974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148649
| 148
| 7
| 33
| 21.142857
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ecbca341dd1b1d6c6a4c9fb5624c8a3b368a1061
| 80
|
py
|
Python
|
src/spaceone/board/error/__init__.py
|
spaceone-dev/board
|
1733756344240b1498bca85b6e1b88d741425ea0
|
[
"Apache-2.0"
] | null | null | null |
src/spaceone/board/error/__init__.py
|
spaceone-dev/board
|
1733756344240b1498bca85b6e1b88d741425ea0
|
[
"Apache-2.0"
] | 1
|
2022-03-23T06:44:15.000Z
|
2022-03-23T06:52:39.000Z
|
src/spaceone/board/error/__init__.py
|
spaceone-dev/board
|
1733756344240b1498bca85b6e1b88d741425ea0
|
[
"Apache-2.0"
] | 1
|
2022-03-22T08:59:01.000Z
|
2022-03-22T08:59:01.000Z
|
from spaceone.board.error.board import *
from spaceone.board.error.post import *
| 40
| 40
| 0.8125
| 12
| 80
| 5.416667
| 0.5
| 0.369231
| 0.523077
| 0.676923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0875
| 80
| 2
| 41
| 40
| 0.890411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ecd71f4acde2201d880cb6e181bc9fe9efa1cc57
| 15,286
|
py
|
Python
|
data/pytorch_example/model.py
|
YiRuitao/single-path-nas-pytorch
|
13de3e6162c868e94a79a836784f7219dfc9367a
|
[
"MIT"
] | 1
|
2021-07-20T09:25:59.000Z
|
2021-07-20T09:25:59.000Z
|
data/pytorch_example/model.py
|
YiRuitao/Single-Path-NAS-with-MAESTRO
|
13de3e6162c868e94a79a836784f7219dfc9367a
|
[
"MIT"
] | null | null | null |
data/pytorch_example/model.py
|
YiRuitao/Single-Path-NAS-with-MAESTRO
|
13de3e6162c868e94a79a836784f7219dfc9367a
|
[
"MIT"
] | null | null | null |
import torch
import torchvision.models as models
import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
import csv
from util import *
def profile_MnasNet(dataflow):
    """Profile the MnasNet-A1 architecture layer by layer: accumulate FLOPs,
    collect per-layer records, and print FLOP/parameter totals via make_summary.

    Reads block definitions from data/MnasNet-A1.csv; relies on helper functions
    from `util` (get_conv_output_and_params_and_flops, profile_blockargs,
    get_linear_output_size_and_nb_param, make_summary).
    """
    model_name = "MnasNet-A1"
    print("="*50)
    print("Profiling model: ", model_name)
    print("="*50)
    input_size = (3, 224, 224)  # assumed CHW ImageNet input — TODO confirm
    num_classes = 1000
    flops = 0
    profiled_layers = []  # tuples of (type, in_size, out_size, stride, nb_params, R, S)
    blocks_args = []
    with open("data/" + model_name + ".csv", mode='r') as model_file:
        model_reader = csv.reader(model_file, delimiter=',')
        for row in model_reader:
            blocks_args += row  # flatten all CSV cells into one list of block strings
    # Stem part
    layer_type, kernel_size, stride, out_channels = "Conv", 3, (2, 2), 32
    output_size, nb_params, R, S, flop = \
        get_conv_output_and_params_and_flops(input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers.append(tuple((layer_type, input_size, output_size, stride, nb_params, R, S)))
    flops += flop
    # MnasNet-A1: SepConv
    # For "SepConv" the helper returns two layers' worth of data (depthwise + pointwise),
    # hence the 9-element unpacking below — presumed from the call pattern; verify in util.
    layer_type, input_size, kernel_size, stride, out_channels = "SepConv", output_size, 3, (1, 1), 16
    output_size1, nb_params1, R1, S1, output_size, nb_params2, R2, S2, flop = \
        get_conv_output_and_params_and_flops(input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers.append(tuple(("DWConv", input_size, output_size1, stride, nb_params1, R1, S1)))
    profiled_layers.append(tuple(("Conv", output_size1, output_size, stride, nb_params2, R2, S2)))
    flops += flop
    # Backbone part
    for blocks_string in blocks_args:
        layers, output_size, flop = profile_blockargs(blocks_string, output_size)
        profiled_layers += layers
        flops += flop
    # Head part
    layer_type, input_size, kernel_size, stride, out_channels = "Conv", output_size, 1, (1, 1), 1280
    output_size, nb_params, R, S, flop = \
        get_conv_output_and_params_and_flops(input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers += [tuple((layer_type, input_size, output_size, stride, nb_params, R, S))]
    flops += flop
    # Final classifier: Linear layer has no stride, hence the None field.
    layer_type, input_size, out_features = "Linear", output_size, num_classes
    output_size, nb_params, R, S, flop = get_linear_output_size_and_nb_param(input_size, out_features)
    profiled_layers += [tuple((layer_type, input_size, output_size, None, nb_params, R, S))]
    flops += flop
    print("Total number of flops:", flops)
    summary = make_summary(profiled_layers, dataflow=dataflow, model_name=model_name)
    # Get number of parameters
    layer_names = list(summary.keys())
    params = list(map(lambda x: int(summary[x]['nb_params']), layer_names))
    print("Total number of parameters:", sum(params))
def profile_MobileNetV2(dataflow):
    """Profile MobileNet-V2 and print its total FLOPs and parameter count.

    Block arguments are loaded from data/MobileNet-V2.csv; the stem, the
    initial separable convolution, the backbone and the head are profiled
    layer by layer, then summarized for the given dataflow.
    """
    model_name = "MobileNet-V2"
    print("=" * 50)
    print("Profiling model: ", model_name)
    print("=" * 50)

    input_size = (3, 224, 224)
    num_classes = 1000
    flops = 0
    profiled_layers = []

    # One block-args string per CSV row.
    blocks_args = []
    with open("data/" + model_name + ".csv", mode='r') as model_file:
        for csv_row in csv.reader(model_file, delimiter=','):
            blocks_args.extend(csv_row)

    # Stem: 3x3 stride-2 convolution producing 32 channels.
    layer_type, kernel_size, stride, out_channels = "Conv", 3, (2, 2), 32
    output_size, nb_params, R, S, flop = get_conv_output_and_params_and_flops(
        input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers.append((layer_type, input_size, output_size, stride, nb_params, R, S))
    flops += flop

    # Mobilenet-V2: separable conv to 16 channels (profiled as DWConv + Conv).
    layer_type, input_size, kernel_size, stride, out_channels = (
        "SepConv", output_size, 3, (1, 1), 16)
    (output_size1, nb_params1, R1, S1,
     output_size, nb_params2, R2, S2, flop) = get_conv_output_and_params_and_flops(
        input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers.append(("DWConv", input_size, output_size1, stride, nb_params1, R1, S1))
    profiled_layers.append(("Conv", output_size1, output_size, stride, nb_params2, R2, S2))
    flops += flop

    # Backbone: profile every block described in the CSV.
    for blocks_string in blocks_args:
        block_layers, output_size, flop = profile_blockargs(blocks_string, output_size)
        profiled_layers.extend(block_layers)
        flops += flop

    # Head: 1x1 convolution up to 1280 channels, then the classifier.
    layer_type, input_size, kernel_size, stride, out_channels = (
        "Conv", output_size, 1, (1, 1), 1280)
    output_size, nb_params, R, S, flop = get_conv_output_and_params_and_flops(
        input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers.append((layer_type, input_size, output_size, stride, nb_params, R, S))
    flops += flop

    layer_type, input_size, out_features = "Linear", output_size, num_classes
    output_size, nb_params, R, S, flop = get_linear_output_size_and_nb_param(
        input_size, out_features)
    profiled_layers.append((layer_type, input_size, output_size, None, nb_params, R, S))
    flops += flop

    print("Total number of flops:", flops)
    summary = make_summary(profiled_layers, dataflow=dataflow, model_name=model_name)
    # Total parameter count across all summarized layers.
    layer_names = list(summary.keys())
    params = [int(summary[name]['nb_params']) for name in layer_names]
    print("Total number of parameters:", sum(params))
def profile_MobileNetV3_large(dataflow):
    """Profile MobileNet-V3-large and print its total FLOPs and parameters.

    Block arguments are loaded from data/MobileNet-V3-large.csv; stem,
    backbone and head are profiled layer by layer and summarized for the
    given dataflow.
    """
    model_name = "MobileNet-V3-large"
    print("=" * 50)
    print("Profiling model: ", model_name)
    print("=" * 50)

    input_size = (3, 224, 224)
    num_classes = 1000
    flops = 0
    profiled_layers = []

    # One block-args string per CSV row.
    blocks_args = []
    with open("data/" + model_name + ".csv", mode='r') as model_file:
        for csv_row in csv.reader(model_file, delimiter=','):
            blocks_args.extend(csv_row)

    # Stem: 3x3 stride-2 convolution to 16 channels.
    layer_type, kernel_size, stride, out_channels = "Conv", 3, (2, 2), 16
    output_size, nb_params, R, S, flop = get_conv_output_and_params_and_flops(
        input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers.append((layer_type, input_size, output_size, stride, nb_params, R, S))
    flops += flop

    # MobileNet-V3-large: no extra layer between stem and backbone.
    # Backbone: blocks are profiled with use_bias=False.
    for blocks_string in blocks_args:
        block_layers, output_size, flop = profile_blockargs(
            blocks_string, output_size, use_bias=False)
        profiled_layers.extend(block_layers)
        flops += flop

    # Head: 1x1 convolution to 960 channels.
    layer_type, input_size, kernel_size, stride, out_channels = (
        "Conv", output_size, 1, (1, 1), 960)
    output_size, nb_params, R, S, flop = get_conv_output_and_params_and_flops(
        input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers.append((layer_type, input_size, output_size, stride, nb_params, R, S))
    flops += flop

    # Pooled linear layer to 1280 features, then the final classifier.
    layer_type, input_size, out_features = "Linear", output_size, 1280
    output_size, nb_params, R, S, flop = get_linear_output_size_and_nb_param(
        input_size, out_features, use_pool=True)
    profiled_layers.append((layer_type, input_size, output_size, None, nb_params, R, S))
    flops += flop

    layer_type, input_size, out_features = "Linear", output_size, num_classes
    output_size, nb_params, R, S, flop = get_linear_output_size_and_nb_param(
        input_size, out_features, use_pool=False)
    profiled_layers.append((layer_type, input_size, output_size, None, nb_params, R, S))
    flops += flop

    print("Total number of flops:", flops)
    summary = make_summary(profiled_layers, dataflow=dataflow, model_name=model_name)
    # Total parameter count across all summarized layers.
    layer_names = list(summary.keys())
    params = [int(summary[name]['nb_params']) for name in layer_names]
    print("Total number of parameters:", sum(params))
def profile_MobileNetV3_small(dataflow):
    """Profile MobileNet-V3-small and print its total FLOPs and parameters.

    Reads the backbone block arguments from data/MobileNet-V3-small.csv,
    profiles the stem, the backbone, the head convolution, the head-side
    squeeze-and-excite (SE) linear pair and the two final linear layers,
    then builds a per-layer summary for the given dataflow.
    """
    model_name = "MobileNet-V3-small"
    print("="*50)
    print("Profiling model: ", model_name)
    print("="*50)
    input_size = (3, 224, 224)
    num_classes = 1000
    flops = 0
    profiled_layers = []
    # One block-args string per CSV row.
    blocks_args = []
    with open("data/" + model_name + ".csv", mode='r') as model_file:
        model_reader = csv.reader(model_file, delimiter=',')
        for row in model_reader:
            blocks_args += row
    # Stem part: 3x3 stride-2 convolution to 16 channels.
    layer_type, kernel_size, stride, out_channels = "Conv", 3, (2, 2), 16
    output_size, nb_params, R, S, flop = \
        get_conv_output_and_params_and_flops(input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers.append((layer_type, input_size, output_size, stride, nb_params, R, S))
    flops += flop
    # MobileNet-V3: no extra layer between stem and backbone.
    # Backbone part (blocks profiled with use_bias=False).
    for blocks_string in blocks_args:
        layers, output_size, flop = profile_blockargs(blocks_string, output_size, use_bias=False)
        profiled_layers += layers
        flops += flop
    # Head part: 1x1 convolution to 576 channels.
    layer_type, input_size, kernel_size, stride, out_channels = "Conv", output_size, 1, (1, 1), 576
    output_size, nb_params, R, S, flop = \
        get_conv_output_and_params_and_flops(input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers.append((layer_type, input_size, output_size, stride, nb_params, R, S))
    flops += flop
    # MobileNet-V3-small: SE pair after the head convolution.
    input_size, expansion = output_size, 0.25
    output_size1, nb_params1, R1, S1, output_size, nb_params2, R2, S2, flop = \
        get_se_output_and_params_and_flops(input_size, expansion=expansion, bias=False)
    # BUG FIX: these two SE layers were previously appended to `layers` (the
    # layer list of the *last backbone block*, which was already consumed by
    # `profiled_layers += layers` above), so they never reached the summary —
    # and raised NameError when the CSV was empty. Append them to
    # `profiled_layers` instead.
    profiled_layers.append(("Linear", input_size, output_size1, None, nb_params1, R1, S1))
    profiled_layers.append(("Linear", output_size1, output_size, None, nb_params2, R2, S2))
    flops += flop
    # Pooled linear layer to 1024 features, then the final classifier.
    layer_type, input_size, out_features = "Linear", output_size, 1024
    output_size, nb_params, R, S, flop = get_linear_output_size_and_nb_param(input_size, out_features, use_pool=True)
    profiled_layers.append((layer_type, input_size, output_size, None, nb_params, R, S))
    flops += flop
    layer_type, input_size, out_features = "Linear", output_size, num_classes
    output_size, nb_params, R, S, flop = get_linear_output_size_and_nb_param(input_size, out_features, use_pool=False)
    profiled_layers.append((layer_type, input_size, output_size, None, nb_params, R, S))
    flops += flop
    print("Total number of flops:", flops)
    summary = make_summary(profiled_layers, dataflow=dataflow, model_name=model_name)
    # Get number of parameters across all summarized layers.
    layer_names = list(summary.keys())
    params = list(map(lambda x: int(summary[x]['nb_params']), layer_names))
    print("Total number of parameters:", sum(params))
def profile_ProxylessNAS(dataflow):
    """Profile ProxylessNAS and print its total FLOPs and parameter count.

    Block arguments are loaded from data/ProxylessNAS.csv; the stem, the
    initial separable convolution, the backbone and the head are profiled
    layer by layer, then summarized for the given dataflow.
    """
    model_name = "ProxylessNAS"
    print("=" * 50)
    print("Profiling model: ", model_name)
    print("=" * 50)

    input_size = (3, 224, 224)
    num_classes = 1000
    flops = 0
    profiled_layers = []

    # One block-args string per CSV row.
    blocks_args = []
    with open("data/" + model_name + ".csv", mode='r') as model_file:
        for csv_row in csv.reader(model_file, delimiter=','):
            blocks_args.extend(csv_row)

    # Stem: 3x3 stride-2 convolution producing 32 channels.
    layer_type, kernel_size, stride, out_channels = "Conv", 3, (2, 2), 32
    output_size, nb_params, R, S, flop = get_conv_output_and_params_and_flops(
        input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers.append((layer_type, input_size, output_size, stride, nb_params, R, S))
    flops += flop

    # ProxylessNAS: separable conv to 16 channels (profiled as DWConv + Conv).
    layer_type, input_size, kernel_size, stride, out_channels = (
        "SepConv", output_size, 3, (1, 1), 16)
    (output_size1, nb_params1, R1, S1,
     output_size, nb_params2, R2, S2, flop) = get_conv_output_and_params_and_flops(
        input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers.append(("DWConv", input_size, output_size1, stride, nb_params1, R1, S1))
    profiled_layers.append(("Conv", output_size1, output_size, stride, nb_params2, R2, S2))
    flops += flop

    # Backbone: profile every block described in the CSV.
    for blocks_string in blocks_args:
        block_layers, output_size, flop = profile_blockargs(blocks_string, output_size)
        profiled_layers.extend(block_layers)
        flops += flop

    # Head: 1x1 convolution (with batch norm) up to 1280 channels.
    layer_type, input_size, kernel_size, stride, out_channels = (
        "Conv", output_size, 1, (1, 1), 1280)
    output_size, nb_params, R, S, flop = get_conv_output_and_params_and_flops(
        input_size, layer_type, kernel_size, stride,
        out_channels=out_channels, use_bn=True)
    profiled_layers.append((layer_type, input_size, output_size, stride, nb_params, R, S))
    flops += flop

    # Pooled linear classifier.
    layer_type, input_size, out_features = "Linear", output_size, num_classes
    output_size, nb_params, R, S, flop = get_linear_output_size_and_nb_param(
        input_size, out_features, use_pool=True)
    profiled_layers.append((layer_type, input_size, output_size, None, nb_params, R, S))
    flops += flop

    print("Total number of flops:", flops)
    summary = make_summary(profiled_layers, dataflow=dataflow, model_name=model_name)
    # Total parameter count across all summarized layers.
    layer_names = list(summary.keys())
    params = [int(summary[name]['nb_params']) for name in layer_names]
    print("Total number of parameters:", sum(params))
def profile_SinglepathNAS(dataflow):
    """Profile SinglepathNAS and print its total FLOPs and parameter count.

    Block arguments are loaded from data/SinglepathNAS.csv; stem, backbone
    and head are profiled layer by layer and summarized for the given
    dataflow.
    """
    model_name = "SinglepathNAS"
    print("=" * 50)
    print("Profiling model: ", model_name)
    print("=" * 50)

    input_size = (3, 224, 224)
    num_classes = 1000
    flops = 0
    profiled_layers = []

    # One block-args string per CSV row.
    blocks_args = []
    with open("data/" + model_name + ".csv", mode='r') as model_file:
        for csv_row in csv.reader(model_file, delimiter=','):
            blocks_args.extend(csv_row)

    # Stem: 3x3 stride-2 convolution producing 32 channels.
    layer_type, kernel_size, stride, out_channels = "Conv", 3, (2, 2), 32
    output_size, nb_params, R, S, flop = get_conv_output_and_params_and_flops(
        input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers.append((layer_type, input_size, output_size, stride, nb_params, R, S))
    flops += flop

    # SinglepathNAS: no extra layer between stem and backbone.
    # Backbone: profile every block described in the CSV.
    for blocks_string in blocks_args:
        block_layers, output_size, flop = profile_blockargs(blocks_string, output_size)
        profiled_layers.extend(block_layers)
        flops += flop

    # Head: 1x1 convolution up to 1280 channels.
    layer_type, input_size, kernel_size, stride, out_channels = (
        "Conv", output_size, 1, (1, 1), 1280)
    output_size, nb_params, R, S, flop = get_conv_output_and_params_and_flops(
        input_size, layer_type, kernel_size, stride, out_channels=out_channels)
    profiled_layers.append((layer_type, input_size, output_size, stride, nb_params, R, S))
    flops += flop

    # Pooled linear classifier.
    layer_type, input_size, out_features = "Linear", output_size, num_classes
    output_size, nb_params, R, S, flop = get_linear_output_size_and_nb_param(
        input_size, out_features, use_pool=True)
    profiled_layers.append((layer_type, input_size, output_size, None, nb_params, R, S))
    flops += flop

    print("Total number of flops:", flops)
    summary = make_summary(profiled_layers, dataflow=dataflow, model_name=model_name)
    # Total parameter count across all summarized layers.
    layer_names = list(summary.keys())
    params = [int(summary[name]['nb_params']) for name in layer_names]
    print("Total number of parameters:", sum(params))
| 44.695906
| 129
| 0.696062
| 2,168
| 15,286
| 4.585793
| 0.056273
| 0.086502
| 0.03621
| 0.040233
| 0.942265
| 0.936632
| 0.935325
| 0.932106
| 0.932106
| 0.931704
| 0
| 0.021283
| 0.188539
| 15,286
| 341
| 130
| 44.826979
| 0.780232
| 0.032055
| 0
| 0.896947
| 0
| 0
| 0.052154
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022901
| false
| 0
| 0.030534
| 0
| 0.053435
| 0.114504
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bf19f041da373831fcd1ff1a998734f4ca50f9de
| 147
|
py
|
Python
|
prettypyplot/gallery.py
|
braniii/prettypyplot
|
39d7d133fe0dc6699fafd57e00a0ec07672fd344
|
[
"BSD-3-Clause"
] | null | null | null |
prettypyplot/gallery.py
|
braniii/prettypyplot
|
39d7d133fe0dc6699fafd57e00a0ec07672fd344
|
[
"BSD-3-Clause"
] | null | null | null |
prettypyplot/gallery.py
|
braniii/prettypyplot
|
39d7d133fe0dc6699fafd57e00a0ec07672fd344
|
[
"BSD-3-Clause"
] | null | null | null |
"""
# Gallery
.. include:: ../gallery/legend/README.md
.. include:: ../gallery/colorbar/README.md
.. include:: ../gallery/subplots/README.md
"""
| 16.333333
| 42
| 0.646259
| 16
| 147
| 5.9375
| 0.4375
| 0.442105
| 0.315789
| 0.463158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102041
| 147
| 8
| 43
| 18.375
| 0.719697
| 0.931973
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1709e5de22ff390cf7b42040ee1d013227441504
| 10,181
|
py
|
Python
|
rlkit/samplers/data_collector/path_collector.py
|
HamzaHz2/rlkit
|
55f30c2f1830693624bc5d4085ab9a1ac80b30c4
|
[
"MIT"
] | 1
|
2019-10-23T11:03:28.000Z
|
2019-10-23T11:03:28.000Z
|
rlkit/samplers/data_collector/path_collector.py
|
HamzaHz2/rlkit
|
55f30c2f1830693624bc5d4085ab9a1ac80b30c4
|
[
"MIT"
] | null | null | null |
rlkit/samplers/data_collector/path_collector.py
|
HamzaHz2/rlkit
|
55f30c2f1830693624bc5d4085ab9a1ac80b30c4
|
[
"MIT"
] | 3
|
2020-11-30T15:15:31.000Z
|
2022-01-11T10:53:19.000Z
|
from collections import OrderedDict, deque
import numpy as np
from rlkit.core.eval_util import create_stats_ordered_dict
from rlkit.samplers.data_collector.base import PathCollector
from rlkit.samplers.rollout_functions import (
multiagent_multitask_rollout,
vec_multitask_rollout,
multitask_rollout,
rollout,
)
class MdpPathCollector(PathCollector):
    """Collects rollouts from an environment with a flat (non-goal) policy.

    Paths from the current epoch are kept in a bounded deque, so diagnostics
    are computed over at most ``max_num_epoch_paths_saved`` recent paths.
    """

    def __init__(
        self,
        env,
        policy,
        max_num_epoch_paths_saved=None,
        render=False,
        render_kwargs=None,
    ):
        if render_kwargs is None:
            render_kwargs = {}
        self._env = env
        self._policy = policy
        self._max_num_epoch_paths_saved = max_num_epoch_paths_saved
        self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)
        self._render = render
        self._render_kwargs = render_kwargs
        self._num_steps_total = 0
        self._num_paths_total = 0

    def collect_new_paths(
        self, max_path_length, num_steps, discard_incomplete_paths,
    ):
        """Roll out the policy until roughly ``num_steps`` env steps are gathered.

        If ``discard_incomplete_paths`` is True, a final path that was cut
        short by the step budget (and did not terminate on its own) is
        dropped. Returns the list of collected paths.
        """
        paths = []
        num_steps_collected = 0
        while num_steps_collected < num_steps:
            max_path_length_this_loop = min(  # Do not go over num_steps
                max_path_length, num_steps - num_steps_collected,
            )
            # NOTE(review): self._render / self._render_kwargs are stored but
            # not forwarded to rollout() here — confirm this is intentional.
            path = rollout(
                self._env, self._policy, max_path_length=max_path_length_this_loop,
            )
            path_len = len(path["actions"])
            if (
                path_len != max_path_length
                and not path["terminals"][-1]
                and discard_incomplete_paths
            ):
                break
            num_steps_collected += path_len
            paths.append(path)
        self._num_paths_total += len(paths)
        self._num_steps_total += num_steps_collected
        self._epoch_paths.extend(paths)
        return paths

    def get_epoch_paths(self):
        """Return the deque of paths collected during the current epoch."""
        return self._epoch_paths

    def end_epoch(self, epoch):
        """Reset the epoch path buffer at an epoch boundary."""
        self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)

    def get_diagnostics(self):
        """Return an OrderedDict of step/path counters, path-length stats and success rate."""
        path_lens = [len(path["actions"]) for path in self._epoch_paths]
        stats = OrderedDict(
            [
                ("num steps total", self._num_steps_total),
                ("num paths total", self._num_paths_total),
            ]
        )
        stats.update(
            create_stats_ordered_dict(
                "path length", path_lens, always_show_all_stats=True,
            )
        )
        # A path counts as a success when its final reward is positive.
        success = [path["rewards"][-1][0] > 0 for path in self._epoch_paths]
        # BUG FIX: guard the division — with no epoch paths the original
        # raised ZeroDivisionError.
        stats["SuccessRate"] = sum(success) / len(success) if success else 0.0
        return stats

    def get_snapshot(self):
        """Return the state to persist in a snapshot (env deliberately excluded)."""
        return dict(
            # env=self._env,
            policy=self._policy,
        )
class GoalConditionedPathCollector(PathCollector):
    """Collects goal-conditioned rollouts via ``multitask_rollout``.

    Observations are dicts; the keys used to extract the observation, the
    desired goal and the representation goal are configurable. Paths from
    the current epoch are kept in a bounded deque.
    """

    def __init__(
        self,
        env,
        policy,
        max_num_epoch_paths_saved=None,
        render=False,
        render_kwargs=None,
        observation_key="observation",
        desired_goal_key="desired_goal",
        representation_goal_key="representation_goal",
    ):
        if render_kwargs is None:
            render_kwargs = {}
        self._env = env
        self._policy = policy
        self._max_num_epoch_paths_saved = max_num_epoch_paths_saved
        self._render = render
        self._render_kwargs = render_kwargs
        self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)
        self._observation_key = observation_key
        self._desired_goal_key = desired_goal_key
        self._representation_goal_key = representation_goal_key
        self._num_steps_total = 0
        self._num_paths_total = 0

    def collect_new_paths(
        self, max_path_length, num_steps, discard_incomplete_paths,
    ):
        """Roll out the policy until roughly ``num_steps`` env steps are gathered.

        If ``discard_incomplete_paths`` is True, a final path that was cut
        short by the step budget (and did not terminate on its own) is
        dropped. Returns the list of collected paths.
        """
        paths = []
        num_steps_collected = 0
        while num_steps_collected < num_steps:
            max_path_length_this_loop = min(  # Do not go over num_steps
                max_path_length, num_steps - num_steps_collected,
            )
            path = multitask_rollout(
                self._env,
                self._policy,
                max_path_length=max_path_length_this_loop,
                render=self._render,
                render_kwargs=self._render_kwargs,
                observation_key=self._observation_key,
                desired_goal_key=self._desired_goal_key,
                representation_goal_key=self._representation_goal_key,
                return_dict_obs=True,
            )
            path_len = len(path["actions"])
            if (
                path_len != max_path_length
                and not path["terminals"][-1]
                and discard_incomplete_paths
            ):
                break
            num_steps_collected += path_len
            paths.append(path)
        self._num_paths_total += len(paths)
        self._num_steps_total += num_steps_collected
        self._epoch_paths.extend(paths)
        return paths

    def get_epoch_paths(self):
        """Return the deque of paths collected during the current epoch."""
        return self._epoch_paths

    def end_epoch(self, epoch):
        """Reset the epoch path buffer at an epoch boundary."""
        self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)

    def get_diagnostics(self):
        """Return an OrderedDict of step/path counters, path-length stats and success rate."""
        path_lens = [len(path["actions"]) for path in self._epoch_paths]
        stats = OrderedDict(
            [
                ("num steps total", self._num_steps_total),
                ("num paths total", self._num_paths_total),
            ]
        )
        stats.update(
            create_stats_ordered_dict(
                "path length", path_lens, always_show_all_stats=True,
            )
        )
        # Success is read from the final env_infos entry of each path.
        success = [path["env_infos"]["success"][-1] for path in self._epoch_paths]
        # BUG FIX: guard the division — with no epoch paths the original
        # raised ZeroDivisionError.
        stats["SuccessRate"] = sum(success) / len(success) if success else 0.0
        return stats

    def get_snapshot(self):
        """Return the state to persist in a snapshot (env deliberately excluded)."""
        return dict(
            # env=self._env,
            policy=self._policy,
            observation_key=self._observation_key,
            desired_goal_key=self._desired_goal_key,
        )
class ParallelGoalConditionedPathCollector(GoalConditionedPathCollector):
    """Goal-conditioned collector that rolls out a vectorized batch of envs.

    Partial rollout state (``rollouts`` / ``obs_reset``) is threaded through
    successive ``vec_multitask_rollout`` calls so unfinished trajectories can
    resume on the next loop iteration.
    """

    def collect_new_paths(
        self, max_path_length, num_steps, discard_incomplete_paths,
    ):
        """Collect roughly ``num_steps`` env steps across parallel environments.

        Returns the list of collected paths; also updates the running
        step/path counters and the epoch path buffer.
        """
        paths = []
        num_steps_collected = 0
        # Carried between iterations: in-progress rollouts and the
        # observations from the last reset (both None on the first call).
        rollouts = None
        obs_reset = None
        while num_steps_collected < num_steps:
            max_path_length_this_loop = min(  # Do not go over num_steps
                max_path_length, num_steps - num_steps_collected,
            )
            collected_paths, rollouts, obs_reset = vec_multitask_rollout(
                self._env,
                self._policy,
                rollouts,
                obs_reset,
                max_path_length=max_path_length_this_loop,
                render=self._render,
                render_kwargs=self._render_kwargs,
                observation_key=self._observation_key,
                desired_goal_key=self._desired_goal_key,
                representation_goal_key=self._representation_goal_key,
                return_dict_obs=True,
            )
            # Accumulate every path returned by this batch and its length.
            paths_len = []
            for path in collected_paths:
                path_len = len(path["actions"])
                paths_len.append(path_len)
                num_steps_collected += path_len
                paths.append(path)
            # i is the batch-local index of the longest path of this batch.
            # NOTE(review): paths[i] indexes the *cumulative* list with that
            # batch-local index; after the first loop iteration this may
            # inspect an earlier path — likely should be collected_paths[i].
            # Confirm before changing.
            i = np.argmax(paths_len)
            if (
                paths_len[i] != max_path_length
                and not paths[i]["terminals"][-1]
                and discard_incomplete_paths
            ):
                break
        self._num_paths_total += len(paths)
        self._num_steps_total += num_steps_collected
        self._epoch_paths.extend(paths)
        return paths
class MultiAgentGoalConditionedPathCollector(GoalConditionedPathCollector):
    """Collects paths for two agents via ``multiagent_multitask_rollout``.

    Each rollout yields one path per agent; empty paths are skipped. The
    goal keys used to slice the dict observations are configurable.
    """

    def __init__(
        self,
        env,
        policy,
        max_num_epoch_paths_saved=None,
        render=False,
        render_kwargs=None,
        observation_key="observation",
        achieved_q_key="achieved_q",
        desired_q_key="desired_q",
        representation_goal_key="representation_goal",
    ):
        self._env = env
        self._policy = policy
        self._render = render
        self._render_kwargs = {} if render_kwargs is None else render_kwargs
        self._max_num_epoch_paths_saved = max_num_epoch_paths_saved
        # Bounded buffer of the current epoch's paths.
        self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)
        self._observation_key = observation_key
        self._achieved_q_key = achieved_q_key
        self._desired_q_key = desired_q_key
        self._representation_goal_key = representation_goal_key
        self._num_steps_total = 0
        self._num_paths_total = 0

    def collect_new_paths(
        self, max_path_length, num_steps, discard_incomplete_paths,
    ):
        """Roll out both agents until at least ``num_steps`` steps are gathered."""
        collected = []
        steps_so_far = 0
        while steps_so_far < num_steps:
            # Cap the rollout length so we do not exceed the step budget.
            horizon = min(max_path_length, num_steps - steps_so_far)
            path_a, path_b = multiagent_multitask_rollout(
                self._env,
                self._policy,
                max_path_length=horizon,
                render=self._render,
                render_kwargs=self._render_kwargs,
                observation_key=self._observation_key,
                achieved_q_key=self._achieved_q_key,
                desired_q_key=self._desired_q_key,
                representation_goal_key=self._representation_goal_key,
            )
            for agent_path in (path_a, path_b):
                length = len(agent_path["actions"])
                if length == 0:
                    continue  # skip empty trajectories
                steps_so_far += length
                collected.append(agent_path)
        self._num_paths_total += len(collected)
        self._num_steps_total += steps_so_far
        self._epoch_paths.extend(collected)
        return collected

    def get_snapshot(self):
        """Return the state to persist in a snapshot (env deliberately excluded)."""
        return dict(
            # env=self._env,
            policy=self._policy,
            observation_key=self._observation_key,
            achieved_q_key=self._achieved_q_key,
            desired_q_key=self._desired_q_key,
        )
| 34.511864
| 83
| 0.602593
| 1,144
| 10,181
| 4.891608
| 0.092657
| 0.067191
| 0.053431
| 0.040029
| 0.879199
| 0.853109
| 0.841673
| 0.834525
| 0.813974
| 0.813974
| 0
| 0.002627
| 0.326883
| 10,181
| 294
| 84
| 34.629252
| 0.81395
| 0.014144
| 0
| 0.775281
| 0
| 0
| 0.028617
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059925
| false
| 0
| 0.018727
| 0.018727
| 0.134831
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bdc5c98a5002facf815b4810f855515866fe9235
| 58
|
py
|
Python
|
pyDataHubImp/entities/satellites/__init__.py
|
bartcallaerts/Braindate_Tesla_Datahub
|
e5b3421eb228fca25f679004c140b883de8ac886
|
[
"MIT"
] | null | null | null |
pyDataHubImp/entities/satellites/__init__.py
|
bartcallaerts/Braindate_Tesla_Datahub
|
e5b3421eb228fca25f679004c140b883de8ac886
|
[
"MIT"
] | null | null | null |
pyDataHubImp/entities/satellites/__init__.py
|
bartcallaerts/Braindate_Tesla_Datahub
|
e5b3421eb228fca25f679004c140b883de8ac886
|
[
"MIT"
] | null | null | null |
from .ODS_Supplier_Details_S import ODS_Supplier_Details_S
| 58
| 58
| 0.931034
| 10
| 58
| 4.8
| 0.6
| 0.458333
| 0.75
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051724
| 58
| 1
| 58
| 58
| 0.872727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
bdcd1bc6b8215c84a638a09d6d383e9aeac6fbe3
| 17,554
|
py
|
Python
|
envision_table_to_html.py
|
hbuter-rubrik/rubrik-scripts-for-python
|
0e434854b778ff0f857425173e5cb7d6b83dddec
|
[
"MIT"
] | 5
|
2019-10-04T18:09:24.000Z
|
2020-08-25T04:46:01.000Z
|
envision_table_to_html.py
|
hbuter-rubrik/rubrik-scripts-for-python
|
0e434854b778ff0f857425173e5cb7d6b83dddec
|
[
"MIT"
] | 2
|
2020-01-07T18:25:11.000Z
|
2021-10-14T11:48:27.000Z
|
envision_table_to_html.py
|
hbuter-rubrik/rubrik-scripts-for-python
|
0e434854b778ff0f857425173e5cb7d6b83dddec
|
[
"MIT"
] | 6
|
2019-04-25T10:26:30.000Z
|
2021-11-18T08:20:50.000Z
|
from RubrikSession import RubrikSession
from getpass import getpass
import requests
import datetime
import re
#report_id = '44d1229d-6d22-418a-abfd-42100480439b'
report_id = 'b306ba74-240f-4a23-a51e-e86ca2175956'
BASE_HTML = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> \
<html xmlns="http://www.w3.org/1999/xhtml"> \
<head><meta http-equiv="Content-Type" content="text/html; charset=utf-8"> \
<meta name="viewport" content="width=device-width, initial-scale=1.0"> \
<link href="https://fonts.googleapis.com/css?family=Ubuntu:500" rel="stylesheet"></head> \
<body> \
<style> \
#leftcolumn dl{display:block;margin-left:20px;} \
#leftcolumn dt{font-size:120%;color:#000;margin:10px 0 0;padding:0;} \
#leftcolumn dt.imp strong{font-weight:normal;color:red;} \
#leftcolumn dd{margin:0;padding:0;} \
#hor-minimalist-b{font-family:"Ubuntu", sans-serif;font-size:14px;background:#fff;width:480px;border-collapse:collapse;text-align:left;margin:20px;} \
#hor-minimalist-b th{font-size:16px;font-weight:600;color:#000;border-bottom:2px solid #000;padding:10px 30px;} \
#hor-minimalist-b td{border-bottom:1px solid #000;color:#000;padding:6px 8px;} \
#hor-minimalist-b tbody tr:hover td{color:#000;} \
h2, h3, h4 { margin-top: 0.65em; margin-bottom: 0.23em; } \
</style> \
<font face="Ubuntu"> \
<img src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAOEAAABNCAYAAAEnajN5AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAAFxEAABcRAcom8z8AACtfSURBVHhe7X0JeBxHmbbNtRCOAAk5wMSxpo+R9g+XgUCANSws/OzCsuxjLZDYWJqZ7pnukXzkYDkW9C8QH/Fty4d8SLZl2dYx09ccGh2W7zsOBJNswk8WstgQnMQhUYxjy+r9vurqUc9MjzSSZVt25n2eerq7qrqqq75+666vxl1V3BWb9x68Mvpyk1y1hk/h9bLCYyxqZIwVJqOvBVNrsmpTI3UaEj5BfpHeusIvyHPo7QAm9jS81U4ha6wmV05tJddCMFikfkEyXCPFCCc2NLyVPqbBKe05EdfU1LyhvLz8jXhvXzFSn8/3TrxHoP1991W/iz6mU2r7H8fGVpdYWboqIwJOTZl8dKfpjexpolbjKgPyVLz6RZn4zb4GgvKzzmcbGKk/KC2ljxa80ZUcvc0AH92/g95mIDsyXyDUgVcbrpFm2RGwagux5JSkObnu2Jv56L5cTxSiVG1Om/bA2/1imPjBAGVZfkf2x9iws9c14rJo1ycxS/nIvk6MmFoXcWngDq42SyILb2G02h8z6vpXqPXlARub18TGVtQiXVhtvTlJ3fwlVtlRUEkEP0eK3uZFIGj9aAQeY77JGEtMNlVHLG/X625glO3Pc2rU5NRE7h+WhcEitP/QjAjLWmregWUrfUwDI+OUlJ8+WqVK9UCpYsOOsBwoQiwcsEucjAjtbJyoNLybWhHw0U4ofXanPbryCeBMYVCeSfz4/XPei1c7IrxmprJ5PfEwGAqJMNuPHQnaC4L0ILFEsOp2k08YJqclTF6BlEV2n+YjBzNedgYGtcNzQii82bqXU34h9Be/JH0IrouJBwpnCu8TZt8+Y8aMgYrD295lRQbZCJH9mVq/DgB/8Vvo7fUFtnOZ6Y3OvQkbCcgcrLc5bd2/M+rG01N6at5EvY0agLJvmTp1ztvo44iAtJ/2wANvp4/54TEW0sLOamoxRh0pGrA8KmtpeQujbj3Nqjug4Cu8CVYIkGBDtQ/dEBDDC7AFhvcQxpyMIsYNk+vq3jzONMfTxyHB6bHp9PaSMdJEOjFoIst6aqHmGPgtieQSy/+GOufAqlHaaQkFbSJldwV1SgOLRDQQaX9AlBfYdsSRIrPYxESGnifvhKrO48dmfzC6idJMEqZPlP8O7Zx+nIn0B2TFn53gO9UNPCaOhYR6O6M3Ueu84JUeqwiO7ttOrTKQnSDEUImE+uF5+phGhh+XMLMTiX7c/BGwWj00qbbcwmpboZ8FnEuoaY+cniR8JPdtO5+niSPu3shR1wDdIsq2cz5jIrHpTR8JsDDK8OMSZl5Jgl+fL8QTh2ywWsspVomc5HTjd1hDZ/yWJHH7l3BtB056246cpK+4AiI8RW8zAL/QaSFU1SeEqjPapv6gvNgfCHVWBqWfEHep+rxPCP8zdSaoFKScOCG8dDx+sepbkLhn6OM4XzBYATx3/Y5xjJoo45NWowok9xmSOOVgJz7zbUde8LYe+TneF1FEEZcXjDEvSW/HcZ2rTSY2MPzCGg2n6e21i0nGgm68ct1roEGwEswak41vIInkk1vP4XW04dYlHS6GDGNiyyO3eYwFjR5jSaNHX7rBShxIUF8HrZ6NJhfbeo5VGreyyvZGVt02k742KnCr84aLIcPw6I88yBiLIUFLiUdPbO0eO3Fsouk1tIOEQcOgGUykF59HCyNJ4JQpNW/yh6p+TB/zh8GoD8/1JBaZJerChxh9GfktqRMkaONBO3EITByntkEClV5O1bdxRirtdikYSQKhuXfI+d6QYbDttX+ht0OCi8br6O2oYCQJzMagYXAd2MMAzsXr+qlVXnCKVs8pCZM3uvMGiH04NI5R85wumt3PQ9gfh7/dvZL0
HqebA+NFUXwzDhXT5/T4FyI7gWk3NlVrJY4WKIxRf4E4uICL6OswcZyCI/VdplfPHccOBOXv+ATpNezGQBuTuMPzk9D12Uc8ACqE8F3QNk2/ix8XEORfYeMZu1HkWaxKD7b7xfAWvyj9Gd3tMCGxNzob7M4E4j10y6xBfTa59qKztGQTW/MmkNXbf2klrhsMdKMU9wRm52ZBCQxW/YY+EqCdPdSBCbQTZiNfAjEToMEdJZY22GT9RVIVJJvyJs4GF+n8db7EITCBofCsDLdCEkhv04BE/Q7eIz0FTCDcZ/RG3BKIsx4+MaxSq0yw8caChxO46L4T9DYHVIIZXB5ZAuXt8Juft+5JAn9PHCjcEohdJWefkYBV1zdDVdDH6o0P4citPV+EYKOx+bzWfpY+jiOSi+4xuej+Xi5y5Del+uMXqVMaeRL4a4j8v+kjPsvZCSwvzxxEx4+HquBHeF9oAvEKCTyb/QcRsMrWB+16Dp+5qPpwukBRe0jFzkf3YgfY5CKH8lb0bgm0Js1kUxRn3oEfhve+oDSLOpOPI3ah0EfRL3R8TzslMZwEIoCvF51uBF6lZTKrtJ1klehJbzT+g4HS0ipQuOjeXpDcSa7t0Em+7WjeuRL4Fb/olJaNikDoc9hrx957QAxldJ7tnju4vYzuOApAHCj8Qelhf1DeSR8Jpk6d+jYocdO99+zeP44wBAJVk+jjAHhV/Q9yNbr7sSqwCpS9JDf46JGX8HpNg4smyMwygtfgl3SUll7t8VfpbRFFFFHEqONDqYU5M1iTtGW38rtxgmrpT6hVGmQJzzDmeoq4jJgUnxchSyk6lqQrDU5fdDMZdyNzVtjhgl5lDOet1v6QehnHqBtO40wI17F9VPr+RYwC2MTinpKW+TdyHVnCI51KXMeGHcv1JhfbhHN00MBteGqc6dpFH5OAtuYubK1De/OqtmqgrfkAfodcPSezUT1clBhz/91jLDCt+f1FdI7fWmPk0VbuYQyn8Kw5fzIyoNdCB3rbubIanPtvPGPNReKwI3bJcOixdcw2+3D+kvRuLnHK/VKBc6D4HTl93CEBdRfTsfBPE5oXkyERa0zYEp41dLochLg2I1CPvm4fq28gwiPruuJbX2MSifQSAEbZRhZwWF3OVjARuI+mu5WcYmzjWmPfoI9XHVdSiH4hROIiwhLlZdSaYMRCZLuWvOpJLOr1JJb0MrElFyapj3yJia/sZeK1YFaDWdPLdmx0XTDJ6Y0rJzQ35yz9YbXmnZyh9HKGCkbr5XSjl1O0p5movogzOuC5s5eL7ez9UCR1C33lquJKClEU697sF6TfB0Ly09QqjZEzkYJXF7yT68SiEuo8Y2W/2yLxkYJTYpvIghw9BYxsF6j1mMGVFOJgGLEQcXWUNbSf22DxxNb1DbZSaihwEX2jtZoKTbs1wKV0ml59N9zvTK/ZHQw+QfoyzeAXcNgxEKp6FSt/NFAcvUy9QQZIXVYGyK4rngVB8IB7fyBYlVOqZAlxfCAo+YVgFYkjCEaqmoPTDC8KQuir1hu5gMbRAgwDvuPAd4VqD4744ftS1WywC6cH00RRvAH8XRRCVWQ82InBhOgTwq3oJkjVmS1+NolCs4WHgszTYNE2/3/6SkHg2rTPcFqKCi+ZFp41iIfrj6zVY6XJ40P+cXQY1qpDQlUXAwHpu2hfKUhfIh4oQNhPoh/nALsTONhOMsExHm3DFqLDvCj6xE9TZ8j48KfB7oztHhCltdQpDRzatd1BQOcCgarPoX0gGPwm8UCBw70opJxhXYCbEP0Buc22A0FGqHUmJkVXcmxy3UVnVyHdYIk1XrgjFiNbm0YCVk09zisoOBxeHhAeDlZ6lb1bqbdBYQsRE51nZpFgNIQImW/WlJcPulYav4PGkzG8bQsxKM8adBa2ECGC6Q8I0jz0R4QXzCO8bNzZvJrn4pv7ifASTRcmFLCwv1Dwyp4nwBDhcW37Wql1QXAwcdDMGRUmCqEYtcoL+J56
Eo5UnVEcDjBR+h21ckWBQkybgBCqp875wRobn4PuwhlWrz/DxRtfYo1NH2BjTdDPQ7MNzA5wi/yRes+ARzG+Yq9BdcKrdC3nIjvPcJFdYPaA2XcGhccru7bxkUNwf/hMqfboGV59Mr1DLh9GS4j+YPjz6D6YEAtp2ICf58j3CNKvqRWBLUT4jozZsmwMpziFuE6RZyghKgPhrxFP+cDvbO2d2FJ7G95bU4rYSbf6eTh3yif0jAh5VZ/Pk3W2tMGips5OPjawHY6P7DxtrbvFqcd9YA6YXORgup/IR4495Y0czd076YJChVgpyIvRH2aOc6GGDcig/0Z3yBi3BsVAnRgMn3IrttEOWPaMHQdYZYwNXw4h2oBi/gLaYyOpupAVcmy06SFbeDg3jB11PhEngbJR/afWIuk8DRZ1zyu4t5GL7H7BEt5+k48cAHMIitEjIxqxKVSIiEBA/iFmAPrHDWHp1iHNGH9Qepx6zQBkfAeN48+VovyEncnYMsUw8J6EAYyAMLroaxm4nEJEzAhIk7HOpt/x6mDtA1dwauwnpa2Jj/J6R/9grU2s80BwcV7ZacCve9XHTqdNm/b28vKRb+qqgXcxDPp4bYJRtO+RDYTxrvSfUarG7vbqu7KEtxcXNqT7QnzbodN85IjpVR+79tcBXC8oa27Oaamyrbs+5zUO93HRfT3UKo0Jzc9e0i68IooooogiirhWMLlOLKpIGfNoaXnj5GN1OYJijaVtbPsa055AdgLVmNDbIq4m7jTm/rMnNu+cR59P9urZYPUlLYyBsx0rTDaxNkOIOO/o3bnDZI1NG0ZzDrKIYWJKTc2b2M5FfbiyrURf0E6txzHGsm228KypqlUmE1+XFiLO6rNGAwhws8kq9QNaaoq48pikPXwrk1rcx+iPEAFawkPB2cLDecY1YOpAiBuJEFGAXLzRZLVNtSSQawC4q7agscfLDNzkR79j9NbmfnDbgveXGAuaPcZSqqHQRXhknnGDycbqzclQ/4HwVtHXrwmEZ95PZugrK6vfR62uCuTq+8l4bKWUOak9LHjic5dYW7gXglnU6Ikt3TopUvcJNrUamOgoNtPCw4liXNkGQow1mJ5o/Te42NbNrLq1kWzzBsMZrU1lJ3KnpMYKBKmaDCoHAmFXtYtXCvYAtiBVjVyATHKR6TEeMRmypnQxmKXwvKyTSSyfwLavBSHmER7Ue4zasH5KT8+bWKMJ7MCQ9aTQoNFbzRIt8vc0ijGH60yAKDRcT2rpGaBrSkkdWKIvu4NN4BINR7GZFt7G9eiHNGJ0FJy9GLjF5PQohBGdgu5jEVdSgD6f+HEQ0vaKYJClVmmMSIAlxsMLPfpcsvkUwSZBaJR5lvBIvZduhXLauklsvOHigPDqobXZsJE6UwGi4HCimM4z6kpagJyu38wnOv5EPI8RXCkB1tTUvCVE5ywlqOvAKqOxMnwBmjVvYJKPmJ74QugqPPJ9tGKTILQM4a00PVptxh5YLrVlEpfYdNFi3qZN1JrAEmALFZ61mpvTNQhHn+KNRm/iE51kkphVk/9EX7nquFICLC8Xb8TJZxQSTvKKYuZI1rAFyGgP/4O1kntxL9uxvJeJzStnEyteslZyryIruZn42l5GW5OhIBTBK9vv9GgN/48+DgDqQC6h9Q6s5tZ7uXiilzWMD/DJrvOcjqu5u3o5JfUH+sZVxxUtQgOhuQEx/JxPkGRqlcYl14GMtmRxib4yRB9HDbgoyhtLpRfsjjVcF40YRl+ykoywxGpNj7ryfmp9ycBJYW+8gxSbvJY659w8M1ZwzQuQMZYsZ2KOTnp8LbQy186mziMGk2h8lzfZBcKLg8G1Ne1QH3a/VtZyouA+IS7oca4+g3rjBlTeP2dOpg5e9OO2Ss2GaZrj87m7CRD1RuOiJBylKYfGB7XOi+z4Uc0Rfqfbu3m/YxABZoefhkdfspR1Co+OsLDx9WaJunbEutaYxCEqvIFVbZzSYaIacq+6+1yhQoTK
/nkhFIZESV/wC9LDuHkSRytw1CIgy39L/IBQQQC4NyHvruKAKD+HKpsqgi51DxVgMDiT9fv97w0E5aN2PGS1G9z7Q3LUTQe7DQgDteNenD49fBPEo+AKOXwXR3mcjRW/IB9EIfnE0AZqlUY+AVZXV78PwiZLDiEdA6NcTGzpopyBaWcnPdaAXYUq6r1goO54nhabaeYR4dlL8XtMXt19thAhYiZg5oLwHsWrbSCx6aWHyBRMvNvyPRt25mAjglqlYQvQF5QrHEsT+yG8v+IV3dAuCG4VFbn9N4QIrUv0Bw2U39D3iYH7DDU90Hj5C7pDen5BrdJwE+C3Z868FfPAsq8+7zw6BAToZJ7bCAuY5BZzQvvwluFzUaPfKjbpguBs4UVxhdseMHtdV1g7YX88MUHpsVkzZr27vLzljRVC+GPUy6gIEN0ww8FfP7BkDXUigMyutd/HfpyTUTZQgOnvFKUoFv34rVCUkoXUNoYjwHA4fJPd7RCkmedzVFl6YisPD2y3zhqYRqNvNBmlIVOZWQHgWhMfstaU2sWmcz0pFZ5x+HxZS0/eIsmGLUDI3D5qlYPREiD6me4LfZRaZwB/GMooXNCbMT+KcAjwt9TKFYUKENenirL1XZCuC85DfDLgia0+whjW1rOMsU0QHqttUqi3YaNMSX3Eq++k+qOymAfCu10/VtCsvS1ASHj64KBsjJYAfaJMtFfng0+UDJLBLnsvbAGKklROrVxRiAArA6GvYVqsuKouiOXijdSLOzx63S8Y3cE8A4SnbjKo84jBtCY+6tV3gxAHhOeNHTl/a+oXBa9+TjNQlB+iVjkYrTowEKieQK1cIQjC7bSYxWL0DmpNYNeBfn/wk9TKFQUxEH4Q4keUL6JST+o8OFi9/jhReADCY7RNQ27DKhTeyO6PebV9IESLecMRHgIFiBlTGZC/Q61yMBoCREMfB8P4oExLBDGc3iiKQAFiHK4q7RwoRIC2gXT333fffYVPNIMAD15KsZkPnpadk0sTj56dXFdYsemELUCfEPo2tcrBqAgQ3OF2qJlwEKDFtApB+gS1I7AFWFkplVArVxQiQPBzDoWH92gH/T/31jqrrr3b2je48QyrNbxUotbPYo3GE6y+le4b3A6m+QyjtU6jr6TxoS1b3s6q6t30cQAtLW/kNdwviPsGd4PZe4bXD50hw2nasb9wbdbeQb71yKP0jUExAgG6CiGdOYMVoVWDsycYDN4ZwD4phuWfeSu1JhhNAQrV1VOgKP482SGFfoPhV1w78bhckINuAqtvMlllE9ErzMVxHs+p3IfMLGTMRtyu6zd44ymTj3eajJq4j1oTTGg+8Dbo4w00WHDrmXrA5NsOfaG09ejdXvXRfjycxNt2RKSvDIpCBIjrSDDxWHdUVEgfpNYZsOuVSkHKGYBHAZKMEqU91MoVAdHamoZxUas0RlWAtBsRCIghTLv1bfKviKdscEpDK6tuTguIi6HQHJOxaitcI+n5wMkgPD42oOiAN3ZCw6c7XT9ZAsQNn1R4uG9QOWR6o0fIfKA3+uinS9ufzquDNhuFCBCHyez+UqUoL6TWafh88kQ7cwKSNJlap2ELkPgJVn2GWmcgEJAm2y1DEMI2ap3G5RAgohL6pDiCRL5dkIeu3rgYCsyejE1rZyICRObhsT8ZY5vQz/Nqu0BAlhAtAaLgBnbt8srhtACHi0IEiIAm/mFbCJWhcBj7TXjeWECamd4sKUpVrn1JW4AYDwrJJ1bNnQGdcFRIjMUzZOAPBubxZrrq8r5cAkT4g/Iu+/ugNV5Drd3BxfC0AsdkrBqF4lVJofB4AwWWPcKCGz+7Ta8BwtJ2lU9s6HkrKTJt4UUOggChyIwev6wCRASl6l77b8X30Nisgcy5KElSKfWaAbsO9IfC94L/PvRvj2XaQ2skDGnmOZ/vgYn0tQwUKkAowl8m4Q1DgAioD39P3DCOQDhDxUkGoMHyBKu0nrQ1VoPwTrFRVeChzht0bBPq
PK9xCIS/95te7fD/cBHUaH0QzOGTperxU8DMEWnFgIx9Fj78JHSyv0itBkUgWB2D+u4sJLQP3kV13K+hZonK6sq8SwbB/0kQ/CkcokPmQXy/xffwfQwHBHwW/vyMg36zAcw8CZl8Chh7M7VyBQhuN2rUrgxmDtch4P0/+AX5VLCq6sPUKgPAxGfwXZ8YeoJaDQ0ccOai+iJWTTzFqSg4d+HRfl7fB9V974eW5r/Q168acCgKi0D6OCLg+1Ozpq2uKZBV1nrCZKMxUgeC4H7Dkb3yVrGZMcJiHLkwsSF2G9aBpcZjJtt8OD/Ni7gCqKl5g1VsxkCA8XQrlFe6f8MrmQPT3tjhvpKUpa2QNGKUY2ap8bhZ2vpoQcVeEZcJvBLfxBtd51ilI6MfyCs9v+UV7OthsXmkb5J2MN2pRQF6tV9Av+/Yrz+x+fCQp3MWcQWAq63pbRpeZe8zfOzIebb5wAeoVRpM46GrvlGkiCKKKKKIIooooogiiiiiiLEIPIrLE18eYLRl5W4aZ4oooohC0VPzpolKzbs9yflfYVKLFnm0eYOeyVMSWXgLagziunBFMW6LWmkyiVUmo6348aBkrKl5A6tunMloG37OqJs/yqsb3jkWtM0WUcTVxHiPMXct07HQZLsWmZ7EApPpWGyWGPNdjyfGcwwZY2kz1w3kI5tr7A021kElbHyNySbqTFZf9SM3MuI0F6Otfx63eHOJLSbftQPe2fIqE998L/VSRBGvP3wgMX+CR5/f6Uku7PPEHgESLkXdCempRIRFvmXbyEGiRPFTJvmsTVK418beprgOyLjBZJW1P5zYM6A7j5BQ3XiaNTaZqJaNizeeY6L1K4pN2SKKALDG4g94jEe6+Z7aPo++kOz58Ubn3sQaSxst8uXWfBb5nJvd6J4pfZ21b8qoN7kYKjZZ/30kI7N8+d8wasOLXHvTOVSth/oVSeRFjBoCQXmqEApv8wfllkCwqs0flL437FMsriMEg9VlgWB4I+YHGMgPeUVlZf7lcpcb4/Gcc6ax5l142qSbQffStp9NZBILy/Dcc24nkMqVfM7az0k+e9OivfcNNy3CfQKP3t7Yz+mNX+TVzR9HbQrYH+RV1dUwjY3vKmqfHRl8gtThXPkMz2dmzBC91Pl1B19Aut9ewGvniT8Y/gfqfGUxyXhY4nYtN9nupdD/WwZmucl1rQCzEkwtNatMfs8G06Mv2VaSWngLkK+HTa7qY4xs4rnVfEi8TPKhf9L/i0PNp2yqfV9PyztYbctJfqdi8h0RaqImn4LnlApGA6NDXzEOtWfLeVZp+Qj9/CIKhF+QU5kklF+82jp1ribw3KtsErqtpr8igH7fg7aK0YFj66m2SjwBnejNA4KmkGiW1krERGXpnR5j1W62fR2QEYnn0uzMR75Y0zlGq19nH2to9QkboU+Iu+JQTam9Ow736KCxtV7iPp3W3rGs+XKs4nomITarxTz6o/LhipMQm5Oe9rmM20CHR38ESIhkswmYST6qMhZIiCQb0Dxq485Y7cQSbc1+tn0jkNGdfHhkM5KPTTS9xuqbNk7JalISEipbT7M6Eg7JZxNwYJejpbk0Cs8RVxKWtNTd6NE0xnnEZREDuF5JKAjSFNzaZO1Hm032o0Hanpg+RNquKAknKjVQY819ielaYnoSC81J2oIfOsmIJLQ0xVLyOYhn9fesPh+bwhquNoeENiYqa+5k9A37ufimi6xe76j54D4J5FPq6/OpO7RIuB1IiHvCkXjNlHiUfHSzI6er4KYACfU0Ce+Kxd7DKbHt3vZu05vaZfJKorvsROEa+V4vuB5J+O1v+28NiPJ/OcmEhqRTkI4OViteMRLyyXl3euLzTzLt0NyMQXMTpxqSi6FGWmx6tIU/mNhT89ZJ+qL72dRKk4khEYF4ONVADNhhnw9VehmrTbYD+oTGatd5QieY6CYPvHOATzWe45LbzjJqfQPGQ51dYTVHdzzPGkA2DUin4dU2QD5NAaMC
CQ2TVdRXS3T9s1yTfrNNPk5LAkkt403uMtloeztqIKbBFwG4XmvCgFj1kIgbeSFNtgFyvVzhl75FvbjiipHwjti893hi88QSbf7sEnXerBJ1ATWLZkHNN5vRf/71kviKz3j0Fd9i9CVzStRl4IZmJZhV9ApGWTm7RKkN3tU0rzANUiNAidHGMtG2WYwanUWuUbw67tvUWXxUncVFtW/wsdg9TMS4j1Njc5i2OLihSRLDRVJzPNHkv5W3tLjqZ3294noloY0ZM+TbUBXttGkhsuV1KFwxEuYDTjswsaW1XAc0F5NQ28VXmh5l+YNjefi/pKXlRk7RN6NOYTyvm9VT51klFhyLGr3HIq53Eg4XV42EhHzG0hVkpJOo/x2Y52Pjq02Pvrqf0VbPHktkvKup6T1c1GggCr1V1Jtim6TJ653QFO0EMnaKw9HM/npEkYSZuOIkRPKx+uJlHJJvqBUu8XXgZ93FEnXNrIkNg/flLic+rPS8G2q6jUQhu5ZJPkv/DRpLTbRX7zG9SvdfOaXLP9pknDGj5q2CIH2iIhj8R39Q/kdBkO+eOnUq6fDjsHilJE3xC9U/A7ftPlHaFhBCjYFQ1X+gjtKa3JU/4/1+mZkhyv8XwwN/fy/Mnn07dSsIGKYghD9WKYS+WgHfUynKnx1MQ7+NoUiIajL9fv+tflH6ul+q+p4gSg3wTswnSHv8Yrg5IIZXBkRJrIT0Q7pvdNXIWABmyPJtqAgK018RCH8FTx8YN84kWivxFALU8iUE5TXwndsh3m2CGF6H8aL2SfDipt1yPOp9he/8MoaJYc+YMSND97kbRkpC/5w578UzIytDoa+igXi/gE1h6pwLMjVhLF7iVvMNucIlthEP+rzIauuqr2STr6yl5R1sVFvvxbO3SM1nq3xzEpDqaE/rMrKUUXn1vaZX2/tXXumpGC0ywk85DY+UIEICodEf2FcZlBejXj2nIJ0ChevL06b5/g8NhgB/XiDqM6gWDv0QvXxB+Sh1LgjTgsEP22rprDBwSF5aRJ3zwp2EVZNEscoLP3sCw7R15A6kxXlvGUwvTgWgOlZ4XnWfIGAhMpT+3TR8wfCOIP1+zAcI4w8iFEr+oHQApxcy47cM8Qff71KoWXkqSK/ZssE0wPOT1DkvhkvCUOiBW3xCWHXmvZUXsy5AgbjYdQkgi2e9pGqBSG7ks4k39AoXLl4PNePaC6yyXkQ9vzT4UQdqOOVUba032Z3T7BwgHiWfg3iWsXVSUXV++gE0f2WUvZe8CyMQlL+DGZ/zU+DPYh038rJfkJ4GcwgIdgL8P+8LSE9iiU+DSIP+ME/awsdwoOQe8lQOJyqE8F3OnwdV5LrpN86Gk4TUoHrBl1F/Iz7Tn+ocfM+fwByG72z2+6Wl4IY1/EG0R3f05wjDxLNzAqLck33yRj4AsbfYcaLB8Oz8DASrLsB3Pof5iPmJ+QpuL/mFcAO0AVzXudI8/Yv9XRZhc3VMZqNQEn5HvP9mCD9qky/tV555HlohLdMemJZf059HW3qETay6SKYa3Jqdha5wwcMpEtvOc1pDvXPnw2gDatsJUOsd4o3OfjLd4NLstGq+/OTjo1S3mLKPnJ3AR/cuocGPGG4khGakGRCkx93OaB0MY4yE9vv9+E1gvky9DgqfUPVPUIs+bf+QaPAe8umvFaHwV6i3vMgmIX3/InzfhpqaPJrjB8HlIqHP99A7oZVSZ2sRHvBTfR66Gq3VhR5V4NFqGY++8jibrAMyutV8YHLIB1cknwG1YKIJasAG7Y5Y02WbmsgGr8Tv4qKJE7zRBWTEA7qc5LMJOAj54kdf46L7miYceLag5UtDIZuEKAjotzyFx9dRLwVjrJEQCpPz2NymXoYFPCcEvuNsOiwkgSBd8PurBj1YJZuEJA+E8HLqPGyMJgkhnC9AeDf4xNASDMcOE694NhaQUkGFwzSI4QHJyBh1R9nEhn64Avlym514JTUfnuwD5GO0Bq2ks2XwY4EuI1gl
9RFO7Tzh1Xr6ecUmXn7y4cEzfGTvjkKPfSoU2STE/gw0nX5GnYeFsUJC/KnA9I2UgDYCodDX4Fsu2t9C07Oruro67xiCk4TWTx4+5/OJf0edh43RIiGYPghnLzSJ4bsG7CG8C5WipFU7Txa8FHB6nZfR1x8jp82TM9DQOJqd8a0XWLXBeH9085hRB+tt2TmZU3b9yqvtBTIi8bLJdwianfu2D/fkp0KRQ0L4gQJieESH4I4VEpL3rR+14AEVN5ARVVFOkB8fwqVEOOPzhTIGpJxwkhC/o1IInZw+PTDiqZJRJGGGscKT/nyfL8TTV0YXTPOaMkbf8EsuUXeRjdVDs3Mr1Hxb4kxi41Xb1DgU+NaOj3OR3U94tQP9vLofFYKf55X9zfyGfe4nMI4SnCSkgoY+1NCHZbhhrJCQxBuQNlGnSwKkp5a0Duj3kLwKhj9PnXOQTUJ4//dDHeoxGEabhHY4tsFj8SD8RXj6Nn11eJhcV/fmktTqWya21N42YBqI4fSmm7m2Bq9X3/RlXtl+Jz7bbm6mJLXllmFNVeBI6jB2buPRrxNbeiCuWJZBO8uUtR94L9t24Et8256fcs2pSUzi+Puc7sRPy+HbvNHDN40zrbmnS8VYI6EQnjNaNeGvRjrXZwPScwOEt0uUBpqXkJ4XKiuDZdRLDsYqCen9BkGQ7oH3MxaFk/ugDM1uaZrbNMmgmBSp+wTUeK9xSdTbgmYL1nom3779IqNu7p6krb+VNTY/wHe2gdsOMM0mF2uhphUM2MciYKIm3xEzOaVl0CNtEeQ8LlVbyOnJPk5JvOhp06fb+wbzAU9G4CJdL3tjUMPhXJ++Dwzek6kGMAfBHMIRT2iG7r9Qqh68m4kc+hSQ7bel8V+C23HTq6F5zCyLnTD5tqO/KuQ850JwOUmIBALhPv7QQw8VXJv7xapy8l76Bxk+Ccn0igg/lRAOUOcRwR8K3escPUQyBkQpPhi5xyoJ8X34rvSxKngcFuTTS04/GDZ0RZ7FBRLgpfBCntM3fpbV6s+ysS1kwIVVN3XcEWlMr9Jg9S0PIgFZLXszrbWXzzZ8QoNra96tTDjPxyraPN5I0hUu1iS7tbQsdZaJJqbl2+tHzpeJ7DzNq9DPc/b5yKF59sF5YNSDJhc90Os8PM/bvO/TfOTI06XaL/pLdSBk5OiTZbHDBc1ZFYLRJCGUom/xwU9KfhQank+QX8U9cdTLUBhfKUhb7fcHwghNp+55kUFCapDAlUHpu8OtEXFiWoA8CIjyBfvnR+MLyK9WCMJd1JsrxjIJ3eYJIeyfglvG/Cj+Dzg3Ons4q5087VsYTt18f8mW1TkrzFll64NY82We9TpAQHtPH58w4Hng+FAbpOaL6g/zsfYM8mVPtPOxHiBR16tspOvb2WS0SLgLSGgfN2oTkJIPjx6lp1dykYMZJLQB5PuYt+3o/WU9J0alBrQxmiREQJPmXwOh8AVboDTMM9CM+2K+UUU8FhVP0QwI0monAclPLIYfe2CwCWMKNxKiIStMgvIzIpTu2LwcZ+Yt4cdPnTrnbf5g1SchDU85a0D8Dsinlyor5dyDnLNwrZEQgUfnAek0+G5yoKXtH/OgQpDrCt3Nnxessh1IiKeFZtZ+2Rtq+UQM7K0jYBEW+VQgX2rItZ3OSXavgeTa+SoX7f63yXUWGS0S7gYSItmyiGefPho5BOEcARIediXh5cJokxABP/HXIaxe+8dBgz8PGQQISiegSbcF7JZBU3UFEEQHor2QvaQLarF++Nl2FzpfmU1CqIFxjvA5CJMes15tn5L6Z/jhOpDwYJbD8zJfMKxDM+yP6J5dC8OP2EdqdyRwAbgWSWjD7/czQjD8mLM7QN7DQkiU4FPEkWl1AOLN5BKKyepARDCoxwXVSHC6QnayW0Yz+WS7Cc1NDQkD1//EMynJZtp0zYf3AzXfUJPs2Ofj1L2vQPOzHGtGLrrvFA/9PmxyWgZJd5ga6Asqx0i/D5qevXzr8Xvo
5192BOWq79gEIJlNruFLIiECm3So8Qt+yj8COUizDo0tXKex3dCIoeqz0Fc55AvlnwZwA/yoHfjzYxjWzy+/GJ4evqmysurDSGawS2+MpX3GvN8BfvtIf0mQa3HRNY2iIEB6tzrXjgKB/+dSSVgphF6xZWPV0NJx6pwXUKjcj/E70gSF0OAktDE9EPoavPOC/S4aTBPk6bN0Le3wwTU13eyNRrnStjbWzZRpGuPZvv2D3mTydi4aO+5NdAHJ3Go/J/mcS8zyTbLvNUvbH+vjI/s2MInEu7jm3ZNKjYMQZ7Y5Tow3epjDQoB+9hUCkMUnTwyEw1xw5kzWJ8uuZ2hfKrCz7xND/wlk2QY/Zg8avxja5Q+EUiDcOngO+Xzh91Pvw0YNzunJMlMRnMliWu69V8pZBYXrXQPBKj+JT5CSGL/1HVJ3QJB3+ITgT+mgxIiBTTfnd1RWV1/ytJjfP+e9GJYd5mCLBWy0lLe8sVKSSux3vhuck3MK51AQZ868wxkvFmrUaXSBqic4Vf+Jt7PHZKL6HrTjW+Mf57TUf/EG7vFD4uU2O4ciHx78yyn7O3DaAacToMl5jlcOvcLt2P8vdjO1iCJe10Dy8ar+I06Lm7yeNPl4Z8aJ2ghG6/gUp3U9zes7+y3SDVXzQV8vdqSPV/anSrZYJ28jSJ+w7eBpXjkKzdTHTG/00Ze8zce+WSRjEa9b4KQ8FzU6+VT3eTLooiaAhN0mG0m6KnriW9vv4dWe33p1HNVE0rnVfEC+6N5ub2R3TpsZSci1HnreqzwKBDxulirHzbLkU+dK247+lHopoojXL3il4y62LVHL612n+EjHAmrtCl7pvser7gEy7gfiWeTjjSN9XHTv7knawVuptxzgifl89EiHN3rsaW/Lse/jqhfqVEQRRYwEHmX/Pdjf49UDW9xqviKKKKKIIooooogirm+MG/e/PqMGH6eXOQMAAAAASUVORK5CYII="> \
<h2> Second & Third Run - Protection Details Report </h2> \
<font size="4"><b>Report Run Time: July 19, 2017 </b></font> \
<br> \
<table id="hor-minimalist-b" summary="Job Summary Details"><tr>'
#Ask Initial Questions
rubrik_mgmt_ip = raw_input("Enter Rubrik MGMT ip: ")
rubrik_user = raw_input("Enter in Rubrik user: ")
rubrik_password = getpass("Enter in Rubrik password: ")
#Instantiate Rubrik Session
rubrik = RubrikSession(rubrik_mgmt_ip, rubrik_user, rubrik_password)
#Get Table Data from Report (high limit so the whole table comes back in one call)
params = {'limit': '9999'}
table_data = rubrik.get_envision_table(report_id, params)
#Add header row; the opening <tr> is already part of BASE_HTML, so close it
#after the last header cell (the original never closed the header row).
for header in table_data['columns']:
    BASE_HTML += '<th>' + header + '</th>'
BASE_HTML += '</tr>'
#Add data rows. enumerate() gives the true column position for each cell;
#the previous row.index(value) lookup returned the FIRST occurrence of a
#value, so any row with duplicate cell values matched the wrong column title
#and could percent-format the wrong cell.
for row in table_data['data']:
    row_string = ""
    for index, value in enumerate(row):
        column_title = table_data['columns'][index]
        if re.findall('Percent', column_title):
            # Render ratio columns as percentages, e.g. '0.85' -> '85.0%'.
            value = "{:.1%}".format(float(value))
        # Data cells use <td> so the '#hor-minimalist-b td' CSS rules apply;
        # <th> is reserved for the header row.
        row_string += '<td>' + value + '</td>'
    BASE_HTML += '<tr>' + row_string + '</tr>'
#Close the table and the document (the original left <font>/<body>/<html> open).
BASE_HTML += '</table></font></body></html>'
#Output File; the with-statement closes the handle, no explicit close() needed.
with open('daily_report_2.html', 'w+') as f:
    f.write(BASE_HTML)
print('hello')
| 240.465753
| 14,984
| 0.928506
| 839
| 17,554
| 19.38975
| 0.75447
| 0.002459
| 0.004303
| 0.001475
| 0.002705
| 0
| 0
| 0
| 0
| 0
| 0
| 0.143205
| 0.018628
| 17,554
| 72
| 14,985
| 243.805556
| 0.801126
| 0.008887
| 0
| 0
| 0
| 0.211538
| 0.88993
| 0.862959
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0.057692
| 0.096154
| null | null | 0.019231
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
bdeac09ff21f51cca86fee602e48a8abed17032f
| 114
|
py
|
Python
|
ramda/inc_test.py
|
Rafi993/pyramda
|
4fa7fe28d5eaa798b702d28bdd3948515cb88f48
|
[
"MIT"
] | 56
|
2018-08-06T08:44:58.000Z
|
2022-03-17T09:49:03.000Z
|
ramda/inc_test.py
|
Rafi993/pyramda
|
4fa7fe28d5eaa798b702d28bdd3948515cb88f48
|
[
"MIT"
] | 28
|
2019-06-17T11:09:52.000Z
|
2022-02-18T16:59:21.000Z
|
ramda/inc_test.py
|
slavaGanzin/pyramda
|
4fa7fe28d5eaa798b702d28bdd3948515cb88f48
|
[
"MIT"
] | 5
|
2019-09-18T09:24:38.000Z
|
2021-07-21T08:40:23.000Z
|
from .inc import inc
from ramda.private.asserts import assert_equal
def inc_test():
    """inc must add exactly one: inc(5) -> 6."""
    expected = 6
    assert_equal(inc(5), expected)
| 16.285714
| 46
| 0.745614
| 19
| 114
| 4.315789
| 0.631579
| 0.268293
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 0.157895
| 114
| 6
| 47
| 19
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
bdf2ce3752f9f99c957e35f5e5f579560f2000ee
| 926
|
py
|
Python
|
src/finitestate/firmware/schemas/schema_products.py
|
FiniteStateInc/clearcode-toolkit
|
521c3a2ab9d9fa6d7b9059227c6af9d09b031c33
|
[
"Apache-2.0"
] | null | null | null |
src/finitestate/firmware/schemas/schema_products.py
|
FiniteStateInc/clearcode-toolkit
|
521c3a2ab9d9fa6d7b9059227c6af9d09b031c33
|
[
"Apache-2.0"
] | null | null | null |
src/finitestate/firmware/schemas/schema_products.py
|
FiniteStateInc/clearcode-toolkit
|
521c3a2ab9d9fa6d7b9059227c6af9d09b031c33
|
[
"Apache-2.0"
] | 1
|
2020-12-22T16:51:40.000Z
|
2020-12-22T16:51:40.000Z
|
import pyspark.sql.types
# Spark schema for product records associated with a firmware image.
# Every column is a plain string except product_categories, which is an
# array of strings.
_STRING_COLUMNS = [
    'firmware_sha256',
    'product_model_id',
    'product_model_name',
    'product_brand_name',
    'product_family_name',
    'product_model_firmware_version',
    'product_model_firmware_id',
    'product_brand_id',
]
products_schema = pyspark.sql.types.StructType(
    [
        pyspark.sql.types.StructField(column, pyspark.sql.types.StringType())
        for column in _STRING_COLUMNS
    ]
    + [
        pyspark.sql.types.StructField(
            'product_categories',
            pyspark.sql.types.ArrayType(pyspark.sql.types.StringType()),
        )
    ]
)
| 61.733333
| 117
| 0.776458
| 113
| 926
| 6.19469
| 0.176991
| 0.3
| 0.45
| 0.334286
| 0.754286
| 0.754286
| 0.754286
| 0.754286
| 0.754286
| 0.754286
| 0
| 0.003488
| 0.071274
| 926
| 14
| 118
| 66.142857
| 0.810465
| 0
| 0
| 0
| 0
| 0
| 0.189189
| 0.059459
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
da40513dd6041841a20c55f294eb7af000b0fabe
| 1,107
|
py
|
Python
|
multiworld/envs/goals/multi_domain/push_door_v1.py
|
Neo-X/R_multiworld
|
839513a48ddf2f5ae2eadc43435ac6981ddea1f4
|
[
"MIT"
] | null | null | null |
multiworld/envs/goals/multi_domain/push_door_v1.py
|
Neo-X/R_multiworld
|
839513a48ddf2f5ae2eadc43435ac6981ddea1f4
|
[
"MIT"
] | null | null | null |
multiworld/envs/goals/multi_domain/push_door_v1.py
|
Neo-X/R_multiworld
|
839513a48ddf2f5ae2eadc43435ac6981ddea1f4
|
[
"MIT"
] | 2
|
2020-06-02T05:24:03.000Z
|
2020-07-07T17:01:42.000Z
|
#import joblib
import pickle
import numpy as np
# Task specifications for the multi-domain push/door environment.
# Each entry fixes the object start pose and the door pose; push tasks add a
# goal position, door tasks add a padded target hinge angle instead.
# (Values presumably metres / radians — TODO confirm against the env.)
tasks = [
    # Pushing
    {'task': 'push', 'obj_init_pos': np.array([0, 0.6, 0.02]), 'goal_pos': np.array([0, 0.81, 0.02]), 'door_pos': np.array([0, 1.0, 0.3])},
    {'task': 'push', 'obj_init_pos': np.array([0, 0.6, 0.02]), 'goal_pos': np.array([-0.15, 0.77, 0.02]), 'door_pos': np.array([0, 1.0, 0.3])},
    {'task': 'push', 'obj_init_pos': np.array([0, 0.6, 0.02]), 'goal_pos': np.array([0.15, 0.77, 0.02]), 'door_pos': np.array([0, 1.0, 0.3])},
    # Door
    {'task': 'door', 'door_pos': np.array([0, 1.0, 0.3]), 'padded_target_angle': np.array([0.29, 0, 0]), 'obj_init_pos': np.array([0, 0.6, 0.02])},
    {'task': 'door', 'door_pos': np.array([0, 1.0, 0.3]), 'padded_target_angle': np.array([0.6, 0, 0]), 'obj_init_pos': np.array([0, 0.6, 0.02])},
    {'task': 'door', 'door_pos': np.array([0, 1.0, 0.3]), 'padded_target_angle': np.array([0.87, 0, 0]), 'obj_init_pos': np.array([0, 0.6, 0.02])},
]
# Serialize with a context manager so the file handle is closed even if
# pickling raises — the original opened fobj and never closed it.
with open('push_door_v1.pkl', 'wb') as fobj:
    pickle.dump(tasks, fobj)
| 58.263158
| 155
| 0.550136
| 211
| 1,107
| 2.739336
| 0.165877
| 0.217993
| 0.249135
| 0.285467
| 0.771626
| 0.769896
| 0.769896
| 0.769896
| 0.769896
| 0.769896
| 0
| 0.116685
| 0.171635
| 1,107
| 18
| 156
| 61.5
| 0.513631
| 0.058717
| 0
| 0
| 0
| 0
| 0.257473
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
da7709ed718c5afbeb28c76bfeff652c8baad9ba
| 108
|
py
|
Python
|
mead/tf/__init__.py
|
sagnik/baseline
|
8d75616e04c1cca509dbebbb6d08ad7e1a7b9f88
|
[
"Apache-2.0"
] | 241
|
2016-04-25T20:02:31.000Z
|
2019-09-03T05:44:09.000Z
|
mead/tf/__init__.py
|
sagnik/baseline
|
8d75616e04c1cca509dbebbb6d08ad7e1a7b9f88
|
[
"Apache-2.0"
] | 131
|
2019-10-12T10:53:17.000Z
|
2021-12-03T19:52:47.000Z
|
mead/tf/__init__.py
|
sagnik/baseline
|
8d75616e04c1cca509dbebbb6d08ad7e1a7b9f88
|
[
"Apache-2.0"
] | 75
|
2016-06-28T01:18:58.000Z
|
2019-08-29T06:47:22.000Z
|
from mead.tf.exporters import *
from mead.tf.preproc_exporters import *
from mead.tf.preprocessors import *
| 27
| 39
| 0.805556
| 16
| 108
| 5.375
| 0.4375
| 0.27907
| 0.348837
| 0.534884
| 0.581395
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 108
| 3
| 40
| 36
| 0.895833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
e54868497af7035575872a2275687bff2bc1606b
| 12,712
|
py
|
Python
|
src/tests/test_pagure_flask_ui_issues_private.py
|
yifengyou/learn-pagure
|
e54ba955368918c92ad2be6347b53bb2c24a228c
|
[
"Unlicense"
] | null | null | null |
src/tests/test_pagure_flask_ui_issues_private.py
|
yifengyou/learn-pagure
|
e54ba955368918c92ad2be6347b53bb2c24a228c
|
[
"Unlicense"
] | null | null | null |
src/tests/test_pagure_flask_ui_issues_private.py
|
yifengyou/learn-pagure
|
e54ba955368918c92ad2be6347b53bb2c24a228c
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
(c) 2018 - Copyright Red Hat Inc
Authors:
Pierre-Yves Chibon <pingou@pingoured.fr>
"""
from __future__ import unicode_literals, absolute_import
import unittest
import sys
import os
from mock import patch, MagicMock
sys.path.insert(
0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
)
import pagure.lib.query # noqa
import tests # noqa
class PagureFlaskIssuesPrivatetests(tests.Modeltests):
"""Tests for flask issues controller of pagure with private tickets"""
@patch("pagure.lib.notify.send_email", MagicMock(return_value=True))
def setUp(self):
""" Set up the environnment, ran before every tests. """
super(PagureFlaskIssuesPrivatetests, self).setUp()
# Create a 3rd user
item = pagure.lib.model.User(
user="random",
fullname="Random user",
password="foo",
default_email="random@bar.com",
)
self.session.add(item)
item = pagure.lib.model.UserEmail(user_id=3, email="random@bar.com")
self.session.add(item)
self.session.commit()
tests.create_projects(self.session)
tests.create_projects_git(os.path.join(self.path, "repos"))
repo = pagure.lib.query.get_authorized_project(self.session, "test")
msg = pagure.lib.query.new_issue(
session=self.session,
repo=repo,
title="Test issue #1",
content="We should work on this for the second time",
user="foo",
status="Open",
private=True,
)
self.session.commit()
self.assertEqual(msg.title, "Test issue #1")
msg = pagure.lib.query.new_issue(
session=self.session,
repo=repo,
title="Test issue #2",
content="We should work on this for the second time",
user="foo",
status="Open",
private=False,
)
self.session.commit()
self.assertEqual(msg.title, "Test issue #2")
def test_issue_list_anonymous(self):
""" Test the list of issues when user is logged out. """
output = self.app.get("/test/issues")
self.assertEqual(output.status_code, 200)
output_text = output.get_data(as_text=True)
self.assertIn("<title>Issues - test - Pagure</title>", output_text)
self.assertIn(
'<span class="fa fa-fw fa-exclamation-circle"></span> 1 Open Issues\n',
output_text,
)
def test_issue_list_admin(self):
"""Test the list of issues when user is an admin of the project."""
user = tests.FakeUser(username="pingou")
with tests.user_set(self.app.application, user):
output = self.app.get("/test/issues")
self.assertEqual(output.status_code, 200)
output_text = output.get_data(as_text=True)
self.assertIn("<title>Issues - test - Pagure</title>", output_text)
self.assertIn(
'<span class="fa fa-fw fa-exclamation-circle"></span> 2 Open Issues\n',
output_text,
)
def test_issue_list_author(self):
"""Test the list of issues when user is an admin of the project."""
user = tests.FakeUser(username="foo")
with tests.user_set(self.app.application, user):
output = self.app.get("/test/issues")
self.assertEqual(output.status_code, 200)
output_text = output.get_data(as_text=True)
self.assertIn("<title>Issues - test - Pagure</title>", output_text)
self.assertIn(
'<span class="fa fa-fw fa-exclamation-circle"></span> 2 Open Issues\n',
output_text,
)
def test_issue_list_authenticated(self):
"""Test the list of issues when user is authenticated but has no
special access to the project.
"""
user = tests.FakeUser(username="random")
with tests.user_set(self.app.application, user):
output = self.app.get("/test/issues")
self.assertEqual(output.status_code, 200)
output_text = output.get_data(as_text=True)
self.assertIn("<title>Issues - test - Pagure</title>", output_text)
self.assertIn(
'<span class="fa fa-fw fa-exclamation-circle"></span> 1 Open Issues\n',
output_text,
)
def test_issue_list_authenticated_ticket(self):
"""Test the list of issues when user is authenticated but has
ticket level access to the project.
"""
repo = pagure.lib.query._get_project(self.session, "test")
msg = pagure.lib.query.add_user_to_project(
session=self.session,
project=repo,
new_user="random",
user="pingou",
access="ticket",
)
self.session.commit()
self.assertEqual(msg, "User added")
user = tests.FakeUser(username="random")
with tests.user_set(self.app.application, user):
output = self.app.get("/test/issues")
self.assertEqual(output.status_code, 200)
output_text = output.get_data(as_text=True)
self.assertIn("<title>Issues - test - Pagure</title>", output_text)
self.assertIn(
'<span class="fa fa-fw fa-exclamation-circle"></span> 1 Open Issues\n',
output_text,
)
def test_issue_list_authenticated_commit(self):
"""Test the list of issues when user is authenticated but has
commit level access to the project.
"""
repo = pagure.lib.query._get_project(self.session, "test")
msg = pagure.lib.query.add_user_to_project(
session=self.session,
project=repo,
new_user="random",
user="pingou",
access="commit",
)
self.session.commit()
self.assertEqual(msg, "User added")
user = tests.FakeUser(username="random")
with tests.user_set(self.app.application, user):
output = self.app.get("/test/issues")
self.assertEqual(output.status_code, 200)
output_text = output.get_data(as_text=True)
self.assertIn("<title>Issues - test - Pagure</title>", output_text)
self.assertIn(
'<span class="fa fa-fw fa-exclamation-circle"></span> 2 Open Issues\n',
output_text,
)
def test_issue_list_authenticated_assigned(self):
"""Test the list of issues when user is authenticated and is
assigned to one of the issue.
"""
repo = pagure.lib.query._get_project(self.session, "test")
issue = pagure.lib.query.search_issues(self.session, repo, issueid=1)
issue.assignee_id = 3 # random
self.session.add(issue)
self.session.commit()
user = tests.FakeUser(username="random")
with tests.user_set(self.app.application, user):
output = self.app.get("/test/issues")
self.assertEqual(output.status_code, 200)
output_text = output.get_data(as_text=True)
self.assertIn("<title>Issues - test - Pagure</title>", output_text)
self.assertIn(
'<span class="fa fa-fw fa-exclamation-circle"></span> 2 Open Issues\n',
output_text,
)
def test_view_issue_anonymous(self):
""" Test accessing a private ticket when user is logged out. """
output = self.app.get("/test/issue/1")
self.assertEqual(output.status_code, 404)
def test_view_issue_admin(self):
"""Test accessing a private ticket when user is an admin of the
project.
"""
user = tests.FakeUser(username="pingou")
with tests.user_set(self.app.application, user):
output = self.app.get("/test/issue/1")
self.assertEqual(output.status_code, 200)
output_text = output.get_data(as_text=True)
self.assertIn(
"<title>Issue #1: Test issue #1 - test - Pagure</title>",
output_text,
)
self.assertIn(
'<span class="fa fa-fw text-success fa-exclamation-circle pt-1"></span>\n'
' <span class="text-success font-weight-bold">#1</span>\n',
output_text,
)
def test_view_issue_author(self):
"""Test accessing a private ticket when user opened the ticket."""
user = tests.FakeUser(username="foo")
with tests.user_set(self.app.application, user):
output = self.app.get("/test/issue/1")
self.assertEqual(output.status_code, 200)
output_text = output.get_data(as_text=True)
self.assertIn(
"<title>Issue #1: Test issue #1 - test - Pagure</title>",
output_text,
)
self.assertIn(
'<span class="fa fa-fw text-success fa-exclamation-circle pt-1"></span>\n'
' <span class="text-success font-weight-bold">#1</span>\n',
output_text,
)
def test_view_issue_authenticated(self):
"""Test accessing a private ticket when user is authenticated but
has no special access to the project.
"""
user = tests.FakeUser(username="random")
with tests.user_set(self.app.application, user):
output = self.app.get("/test/issue/1")
self.assertEqual(output.status_code, 404)
def test_view_issue_authenticated_ticket(self):
"""Test accessing a private ticket when user is authenticated and
has ticket level access to the project.
"""
repo = pagure.lib.query._get_project(self.session, "test")
msg = pagure.lib.query.add_user_to_project(
session=self.session,
project=repo,
new_user="random",
user="pingou",
access="ticket",
)
self.session.commit()
self.assertEqual(msg, "User added")
user = tests.FakeUser(username="random")
with tests.user_set(self.app.application, user):
output = self.app.get("/test/issue/1")
self.assertEqual(output.status_code, 404)
    def test_view_issue_authenticated_commit(self):
        """Test accessing a private ticket when user is authenticated and
        has commit level access to the project.
        """
        # Grant "random" commit-level access on the project.
        repo = pagure.lib.query._get_project(self.session, "test")
        msg = pagure.lib.query.add_user_to_project(
            session=self.session,
            project=repo,
            new_user="random",
            user="pingou",
            access="commit",
        )
        self.session.commit()
        self.assertEqual(msg, "User added")
        user = tests.FakeUser(username="random")
        with tests.user_set(self.app.application, user):
            output = self.app.get("/test/issue/1")
            # Commit access (unlike ticket access above) grants visibility
            # of the private issue, so the page renders.
            self.assertEqual(output.status_code, 200)
            output_text = output.get_data(as_text=True)
            self.assertIn(
                "<title>Issue #1: Test issue #1 - test - Pagure</title>",
                output_text,
            )
            # Open-issue badge markup for issue #1.
            self.assertIn(
                '<span class="fa fa-fw text-success fa-exclamation-circle pt-1"></span>\n'
                '            <span class="text-success font-weight-bold">#1</span>\n',
                output_text,
            )
    def test_view_issue_authenticated_assigned(self):
        """Test accessing a private ticket when user is authenticated and
        is assigned to one of the issue.
        """
        # Assign the private issue directly to user id 3 ("random",
        # per the fixtures — see the inline note below).
        repo = pagure.lib.query._get_project(self.session, "test")
        issue = pagure.lib.query.search_issues(self.session, repo, issueid=1)
        issue.assignee_id = 3  # random
        self.session.add(issue)
        self.session.commit()
        user = tests.FakeUser(username="random")
        with tests.user_set(self.app.application, user):
            output = self.app.get("/test/issue/1")
            # The assignee can view the private ticket.
            self.assertEqual(output.status_code, 200)
            output_text = output.get_data(as_text=True)
            self.assertIn(
                "<title>Issue #1: Test issue #1 - test - Pagure</title>",
                output_text,
            )
            # Open-issue badge markup for issue #1.
            self.assertIn(
                '<span class="fa fa-fw text-success fa-exclamation-circle pt-1"></span>\n'
                '            <span class="text-success font-weight-bold">#1</span>\n',
                output_text,
            )
if __name__ == "__main__":
    # Allow running this test module directly; verbosity=2 prints one
    # line per test case.
    unittest.main(verbosity=2)
| 37.609467
| 90
| 0.58433
| 1,525
| 12,712
| 4.74623
| 0.110164
| 0.045593
| 0.030948
| 0.030948
| 0.876347
| 0.873446
| 0.873446
| 0.872202
| 0.854103
| 0.82882
| 0
| 0.009974
| 0.298065
| 12,712
| 337
| 91
| 37.721068
| 0.801188
| 0.110289
| 0
| 0.730924
| 0
| 0.044177
| 0.192548
| 0.039613
| 0
| 0
| 0
| 0
| 0.168675
| 1
| 0.060241
| false
| 0.004016
| 0.028112
| 0
| 0.092369
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
00675ab9db11009528da3ff0bc385d770e6eda52
| 2,468
|
py
|
Python
|
amb.py
|
RN-JK/Ubiart-Tape-Serializer
|
879bfe27b11c290e5653dac8735ddba322bb5716
|
[
"MIT"
] | null | null | null |
amb.py
|
RN-JK/Ubiart-Tape-Serializer
|
879bfe27b11c290e5653dac8735ddba322bb5716
|
[
"MIT"
] | null | null | null |
amb.py
|
RN-JK/Ubiart-Tape-Serializer
|
879bfe27b11c290e5653dac8735ddba322bb5716
|
[
"MIT"
] | null | null | null |
import os, struct, json, zlib

print("AMB TPL Serializer by: JackLSummer15")

# Create the output directory. Unlike the previous bare try/except, this
# only tolerates the "already exists" case instead of swallowing every
# possible error (permissions, read-only FS, ...).
os.makedirs('amb', exist_ok=True)

# Fixed TPL headers. The two variants differ only at offsets 6-7
# (\x02\xB7 vs \x02\xB5) and 64-67 — presumably per-release identifiers;
# confirm against the UbiArt TPL format if documentation exists.
_HEADER_2015 = b'\x00\x00\x00\x01\x00\x00\x02\xB7\x1B\x85\x7B\xCE\x00\x00\x00\x6C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xD9\x4D\x6C\x53\x00\x00\x01\x18\x00\x00\x00\x01\x00\x00\x00\xF8\xD8\x46\x77\x1B\x00\x00\x00\x00\xEB\x53\x7A\x60\xFF\xFF\xFF\xFF\x00\x00\x00\x00\xFF\xFF\xFF\xFF\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01'
_HEADER_OTHER = b'\x00\x00\x00\x01\x00\x00\x02\xB5\x1B\x85\x7B\xCE\x00\x00\x00\x6C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xD9\x4D\x6C\x53\x00\x00\x01\x18\x00\x00\x00\x01\x00\x00\x00\xF8\x28\xB8\x81\xEC\x00\x00\x00\x00\xEB\x53\x7A\x60\xFF\xFF\xFF\xFF\x00\x00\x00\x00\xFF\xFF\xFF\xFF\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01'

# Fixed trailer appended after the name/path block.
_FOOTER = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x60\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x01\xFF\xFF\xFF\xFF\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3F\x80\x00\x00\x3F\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF\xFF\xFF\x00\x00\x00\x00'


def build_amb_tpl(codename, ambsfx, jdversion):
    """Build one ambient-sound .tpl.ckd entry.

    Args:
        codename: map codename (any case; lowered internally).
        ambsfx: ambient sound suffix, e.g. "wind" (any case).
        jdversion: 2015 selects the jd2015 world layout and header;
            anything else selects the generic "world/maps" layout.

    Returns:
        (filename, payload): the output file name
        ("amb_<map>_<sfx>.tpl.ckd") and the complete bytes to write.
    """
    codenamelow = codename.lower()
    ambname = 'amb_' + codenamelow + '_' + ambsfx.lower()
    if jdversion == 2015:
        header = _HEADER_2015
        path = 'world/jd2015/' + codenamelow + '/audio/amb/'
    else:
        header = _HEADER_OTHER
        path = 'world/maps/' + codenamelow + '/audio/amb/'
    # Encode once; the length prefix now counts encoded bytes (identical
    # for ASCII names, correct also for non-ASCII codenames).
    ambwav = (ambname + '.wav').encode('utf-8')
    payload = (
        header
        # Big-endian length-prefixed wav name and directory path,
        # followed by a little-endian CRC32 of the wav name.
        + struct.pack('>I', len(ambwav)) + ambwav
        + struct.pack('>I', len(path)) + path.encode('utf-8')
        + struct.pack('<I', zlib.crc32(ambwav))
        + _FOOTER
    )
    return ambname + '.tpl.ckd', payload


if __name__ == '__main__':
    with open('amb_inject.json') as inputfile:
        obj = json.load(inputfile)
    codenames = obj[0]['maps']
    ambsounds = obj[0]['ambs']
    jdversion = obj[0]['JDVersion']
    for codename in codenames:
        for ambsfx in ambsounds:
            tplname, payload = build_amb_tpl(codename, ambsfx, jdversion)
            print("making " + codename + ' + ' + ambsfx + '...')
            # Context manager guarantees the handle is closed even if a
            # write fails (the original leaked the handle on error).
            with open("amb/" + tplname, "wb") as out:
                out.write(payload)
| 46.566038
| 485
| 0.6641
| 461
| 2,468
| 3.529284
| 0.210412
| 0.722803
| 0.929318
| 1.069453
| 0.598648
| 0.598648
| 0.56177
| 0.554395
| 0.539644
| 0.505224
| 0
| 0.256114
| 0.121961
| 2,468
| 52
| 486
| 47.461538
| 0.494693
| 0.017828
| 0
| 0
| 0
| 0.096774
| 0.6109
| 0.534009
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0.032258
| 0.032258
| 0
| 0.032258
| 0.064516
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
006b1e1bf7248ad6e3d677c22a91027bd235e32e
| 18,406
|
py
|
Python
|
model_search/metric_fns_test.py
|
dywsjtu/model_search
|
116c4f9016d8b89cf06d057dda020dae3371f211
|
[
"Apache-2.0"
] | 3,315
|
2021-01-20T15:21:37.000Z
|
2022-03-30T18:21:29.000Z
|
model_search/metric_fns_test.py
|
dywsjtu/model_search
|
116c4f9016d8b89cf06d057dda020dae3371f211
|
[
"Apache-2.0"
] | 57
|
2021-01-19T20:51:03.000Z
|
2022-03-24T11:04:07.000Z
|
model_search/metric_fns_test.py
|
dywsjtu/model_search
|
116c4f9016d8b89cf06d057dda020dae3371f211
|
[
"Apache-2.0"
] | 380
|
2021-02-20T01:31:35.000Z
|
2022-03-31T16:48:58.000Z
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# List as: python3
"""Tests for model_search.metric_fns."""
from absl.testing import parameterized
from model_search import metric_fns
import numpy as np
import tensorflow.compat.v2 as tf
class MetricFnsTest(tf.test.TestCase, parameterized.TestCase):
  """Unit tests for the metric_fn factories in model_search.metric_fns."""

  # pylint: disable=g-long-lambda
  # tf.constant must be called in a lambda, otherwise the Op would be created
  # in a different graph from where it would be used, which is not allowed.
  @parameterized.named_parameters(
      {
          'testcase_name':
              'int64_label_single_task',
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: tf.constant([1, 1, 1, 1, 1], dtype=tf.int64),
          'predictions_fn':
              lambda: {
                  'predictions': tf.constant([1, 0, 0, 0, 0], dtype=tf.int64),
              },
          'expected_metric_dict': {
              'accuracy': np.float32(0.2)
          }
      }, {
          'testcase_name':
              'string_label_single_task',
          'label_vocabulary': ['A', 'B', 'C', 'D', 'E'],
          'labels_fn':
              lambda: tf.constant(['A', 'B', 'C', 'D', 'E'], dtype=tf.string),
          'predictions_fn':
              lambda: {
                  'predictions': tf.constant([0, 0, 0, 0, 0], dtype=tf.int64),
              },
          'expected_metric_dict': {
              'accuracy': np.float32(0.2)
          }
      }, {
          # String labels with no vocabulary: no accuracy can be computed,
          # so the expected metric dict is empty.
          'testcase_name':
              'string_label_no_vocab_single_task',
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: tf.constant(['A', 'B', 'C', 'D', 'E'], dtype=tf.string),
          'predictions_fn':
              lambda: {
                  'predictions': tf.constant([0, 0, 0, 0, 0], dtype=tf.int64),
              },
          'expected_metric_dict': {}
      }, {
          'testcase_name':
              'int64_label_multi_task',
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: {
                  'task_a': tf.constant([1, 1, 1, 1, 1], dtype=tf.int64),
                  'task_b': tf.constant([1, 1, 1, 1, 1], dtype=tf.int64),
              },
          'predictions_fn':
              lambda: {
                  'predictions':
                      tf.constant([1, 0, 0, 0, 0], dtype=tf.int64),
                  'predictions/task_a':
                      tf.constant([1, 0, 0, 0, 0], dtype=tf.int64),
                  'predictions/task_b':
                      tf.constant([1, 1, 1, 0, 0], dtype=tf.int64),
              },
          'expected_metric_dict': {
              'accuracy/task_a': np.float32(0.2),
              'accuracy/task_b': np.float32(0.6),
          },
      }, {
          'testcase_name':
              'string_label_multi_task',
          'label_vocabulary': {
              'task_a': ['A', 'B', 'C', 'D', 'E'],
              'task_b': ['F', 'G', 'H', 'I', 'J'],
          },
          'labels_fn':
              lambda: {
                  'task_a':
                      tf.constant(['A', 'B', 'C', 'D', 'E'], dtype=tf.string),
                  'task_b':
                      tf.constant(['F', 'G', 'H', 'I', 'J'], dtype=tf.string),
              },
          'predictions_fn':
              lambda: {
                  'predictions':
                      tf.constant([0, 0, 0, 0, 0], dtype=tf.int64),
                  'predictions/task_a':
                      tf.constant([0, 0, 0, 0, 0], dtype=tf.int64),
                  'predictions/task_b':
                      tf.constant([1, 1, 1, 1, 1], dtype=tf.int64),
              },
          'expected_metric_dict': {
              'accuracy/task_a': np.float32(0.2),
              'accuracy/task_b': np.float32(0.2),
          },
      }, {
          # Vocabulary supplied only for task_a; task_b uses int labels.
          'testcase_name':
              'mixed_label_multi_task',
          'label_vocabulary': {
              'task_a': ['A', 'B', 'C', 'D', 'E'],
          },
          'labels_fn':
              lambda: {
                  'task_a':
                      tf.constant(['A', 'B', 'C', 'D', 'E'], dtype=tf.string),
                  'task_b':
                      tf.constant([1, 1, 0, 0, 0], dtype=tf.int64),
              },
          'predictions_fn':
              lambda: {
                  'predictions':
                      tf.constant([0, 0, 0, 0, 0], dtype=tf.int64),
                  'predictions/task_a':
                      tf.constant([0, 0, 0, 0, 0], dtype=tf.int64),
                  'predictions/task_b':
                      tf.constant([1, 1, 1, 1, 1], dtype=tf.int64),
              },
          'expected_metric_dict': {
              'accuracy/task_a': np.float32(0.2),
              'accuracy/task_b': np.float32(0.4),
          },
      }, {
          # No vocabulary at all: the string-labeled task_a is dropped;
          # only the int-labeled task_b produces an accuracy.
          'testcase_name':
              'string_no_vocab_multi_task',
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: {
                  'task_a':
                      tf.constant(['A', 'B', 'C', 'D', 'E'], dtype=tf.string),
                  'task_b':
                      tf.constant([1, 1, 0, 0, 0], dtype=tf.int64),
              },
          'predictions_fn':
              lambda: {
                  'predictions':
                      tf.constant([0, 0, 0, 0, 0], dtype=tf.int64),
                  'predictions/task_a':
                      tf.constant([0, 0, 0, 0, 0], dtype=tf.int64),
                  'predictions/task_b':
                      tf.constant([1, 1, 1, 1, 1], dtype=tf.int64),
              },
          'expected_metric_dict': {
              'accuracy/task_b': np.float32(0.4),
          },
      })
  # pylint: enable=g-long-lambda
  def test_make_accuracy_metric_fn(self, label_vocabulary, labels_fn,
                                   predictions_fn, expected_metric_dict):
    """Checks make_accuracy_metric_fn against each parameterized case."""
    # Force graph mode
    with tf.compat.v1.Graph().as_default():
      metric_fn = metric_fns.make_accuracy_metric_fn(label_vocabulary)
      actual_metric_dict = metric_fn(labels_fn(), predictions_fn())
      with self.test_session() as sess:
        sess.run(tf.compat.v1.initializers.local_variables())
        sess.run(tf.compat.v1.initializers.tables_initializer())
        actual_metric_dict_val = sess.run(actual_metric_dict)
        # Each metric entry is a 2-tuple; keep element [1] only
        # (NOTE(review): tf.metrics (value, update_op) convention —
        # confirm against metric_fns).
        actual_metric_dict_val_clean = {
            metric_key: metric_val[1]
            for metric_key, metric_val in actual_metric_dict_val.items()
        }
        self.assertEqual(expected_metric_dict, actual_metric_dict_val_clean)

  # pylint: disable=g-long-lambda
  @parameterized.named_parameters(
      {
          'testcase_name':
              'roc_perfect',
          'metric_fn_factory':
              metric_fns.make_auc_roc_metric_fn,
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: tf.constant([1, 0], dtype=tf.int64),
          'predictions_fn':
              lambda: {
                  'probabilities':
                      tf.constant([[0.0, 1.0], [1.0, 0.0]], dtype=tf.float32),
              },
          'expected_metric_dict': {
              'auc_roc': np.float32(1.0)
          }
      }, {
          'testcase_name':
              'roc_perfect_vocab',
          'metric_fn_factory':
              metric_fns.make_auc_roc_metric_fn,
          'label_vocabulary': ['ZERO', 'ONE'],
          'labels_fn':
              lambda: tf.constant(['ONE', 'ZERO'], dtype=tf.string),
          'predictions_fn':
              lambda: {
                  'probabilities':
                      tf.constant([[0.0, 1.0], [1.0, 0.0]], dtype=tf.float32),
              },
          'expected_metric_dict': {
              'auc_roc': np.float32(1.0)
          }
      }, {
          # Uninformative (uniform) probabilities give AUC 0.5.
          'testcase_name':
              'roc_random',
          'metric_fn_factory':
              metric_fns.make_auc_roc_metric_fn,
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: tf.constant([1, 0], dtype=tf.int64),
          'predictions_fn':
              lambda: {
                  'probabilities':
                      tf.constant([[0.5, 0.5], [0.5, 0.5]], dtype=tf.float32),
              },
          'expected_metric_dict': {
              'auc_roc': np.float32(0.5)
          }
      }, {
          'testcase_name':
              'pr_perfect',
          'metric_fn_factory':
              metric_fns.make_auc_pr_metric_fn,
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: tf.constant([1, 0], dtype=tf.int64),
          'predictions_fn':
              lambda: {
                  'probabilities':
                      tf.constant([[0.0, 1.0], [1.0, 0.0]], dtype=tf.float32),
              },
          'expected_metric_dict': {
              'auc_pr': np.float32(1.0)
          }
      }, {
          'testcase_name':
              'pr_perfect_vocab',
          'metric_fn_factory':
              metric_fns.make_auc_pr_metric_fn,
          'label_vocabulary': ['ZERO', 'ONE'],
          'labels_fn':
              lambda: tf.constant(['ONE', 'ZERO'], dtype=tf.string),
          'predictions_fn':
              lambda: {
                  'probabilities':
                      tf.constant([[0.0, 1.0], [1.0, 0.0]], dtype=tf.float32),
              },
          'expected_metric_dict': {
              'auc_pr': np.float32(1.0)
          }
      }, {
          'testcase_name':
              'pr_random',
          'metric_fn_factory':
              metric_fns.make_auc_pr_metric_fn,
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: tf.constant([1, 0], dtype=tf.int64),
          'predictions_fn':
              lambda: {
                  'probabilities':
                      tf.constant([[0.5, 0.5], [0.5, 0.5]], dtype=tf.float32),
              },
          'expected_metric_dict': {
              'auc_pr': np.float32(0.5)
          }
      })
  # pylint: enable=g-long-lambda
  def test_auc_metric_fn(self, metric_fn_factory, label_vocabulary, labels_fn,
                         predictions_fn, expected_metric_dict):
    """Checks the AUC-ROC / AUC-PR factories on valid binary inputs."""
    # Force graph mode
    with tf.compat.v1.Graph().as_default():
      metric_fn = metric_fn_factory(label_vocabulary)
      actual_metric_dict = metric_fn(labels_fn(), predictions_fn())
      with self.test_session() as sess:
        sess.run(tf.compat.v1.initializers.local_variables())
        sess.run(tf.compat.v1.initializers.tables_initializer())
        actual_metric_dict_val = sess.run(actual_metric_dict)
        # Keep element [1] of each metric tuple (see note in
        # test_make_accuracy_metric_fn).
        actual_metric_dict_val_clean = {
            metric_key: metric_val[1]
            for metric_key, metric_val in actual_metric_dict_val.items()
        }
        # assertAllClose (not assertEqual): AUC values are floats.
        self.assertAllClose(expected_metric_dict, actual_metric_dict_val_clean)

  # pylint: disable=g-long-lambda
  @parameterized.named_parameters(
      {
          # Multi-task labels are not supported by the AUC metrics.
          'testcase_name':
              'roc_multi_task',
          'metric_fn_factory':
              metric_fns.make_auc_roc_metric_fn,
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: {
                  'task_a': tf.constant([1, 0], dtype=tf.int64),
                  'task_b': tf.constant([1, 0], dtype=tf.int64),
              },
          'predictions_fn':
              lambda: {
                  'probabilities':
                      tf.constant([[0.0, 1.0], [1.0, 0.0]], dtype=tf.float32),
                  'probabilities/task_a':
                      tf.constant([[0.0, 1.0], [1.0, 0.0]], dtype=tf.float32),
                  'probabilities/task_b':
                      tf.constant([[0.0, 1.0], [1.0, 0.0]], dtype=tf.float32),
              },
          'exception_class':
              NotImplementedError,
      }, {
          # Probabilities must be rank 2.
          'testcase_name':
              'roc_rank3_prob_tensor',
          'metric_fn_factory':
              metric_fns.make_auc_roc_metric_fn,
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: tf.constant([1, 0], dtype=tf.int64),
          'predictions_fn':
              lambda: {
                  'probabilities':
                      tf.constant([[[0.5, 0.5], [0.5, 0.5]],
                                   [[0.5, 0.5], [0.5, 0.5]]],
                                  dtype=tf.float32),
              },
          'exception_class':
              ValueError,
      }, {
          # AUC is defined for binary classification only.
          'testcase_name':
              'roc_prob_tensor_3_classes',
          'metric_fn_factory':
              metric_fns.make_auc_roc_metric_fn,
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: tf.constant([2, 1, 0], dtype=tf.int64),
          'predictions_fn':
              lambda: {
                  'probabilities':
                      tf.constant([[0.0, 1.0, 0.0], [1.0, 0.0, 0.0]],
                                  dtype=tf.float32),
              },
          'exception_class':
              ValueError,
      }, {
          'testcase_name':
              'pr_multi_task',
          'metric_fn_factory':
              metric_fns.make_auc_pr_metric_fn,
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: {
                  'task_a': tf.constant([1, 0], dtype=tf.int64),
                  'task_b': tf.constant([1, 0], dtype=tf.int64),
              },
          'predictions_fn':
              lambda: {
                  'probabilities':
                      tf.constant([[0.0, 1.0], [1.0, 0.0]], dtype=tf.float32),
                  'probabilities/task_a':
                      tf.constant([[0.0, 1.0], [1.0, 0.0]], dtype=tf.float32),
                  'probabilities/task_b':
                      tf.constant([[0.0, 1.0], [1.0, 0.0]], dtype=tf.float32),
              },
          'exception_class':
              NotImplementedError,
      }, {
          'testcase_name':
              'pr_rank3_prob_tensor',
          'metric_fn_factory':
              metric_fns.make_auc_pr_metric_fn,
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: tf.constant([1, 0], dtype=tf.int64),
          'predictions_fn':
              lambda: {
                  'probabilities':
                      tf.constant([[[0.5, 0.5], [0.5, 0.5]],
                                   [[0.5, 0.5], [0.5, 0.5]]],
                                  dtype=tf.float32),
              },
          'exception_class':
              ValueError,
      }, {
          'testcase_name':
              'pr_prob_tensor_3_classes',
          'metric_fn_factory':
              metric_fns.make_auc_pr_metric_fn,
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: tf.constant([2, 1, 0], dtype=tf.int64),
          'predictions_fn':
              lambda: {
                  'probabilities':
                      tf.constant([[0.0, 1.0, 0.0], [1.0, 0.0, 0.0]],
                                  dtype=tf.float32),
              },
          'exception_class':
              ValueError,
      }, {
          # String labels cannot be mapped to {0, 1} without a vocabulary.
          'testcase_name':
              'roc_string_label_no_vocab',
          'metric_fn_factory':
              metric_fns.make_auc_roc_metric_fn,
          'label_vocabulary':
              None,
          'labels_fn':
              lambda: tf.constant(['ONE', 'ZERO'], dtype=tf.string),
          'predictions_fn':
              lambda: {
                  'probabilities':
                      tf.constant([[1.0, 0.0], [0.0, 1.0]], dtype=tf.float32),
              },
          'exception_class':
              ValueError,
      })
  # pylint: enable=g-long-lambda
  def test_auc_metric_fn_error(self, metric_fn_factory, label_vocabulary,
                               labels_fn, predictions_fn, exception_class):
    """Checks that invalid inputs raise the documented exception types."""
    with self.assertRaises(exception_class):
      metric_fn = metric_fn_factory(label_vocabulary)
      metric_fn(labels_fn(), predictions_fn())

  def test_create_num_parameters_metric_fn_no_tower(self):
    """Without a tower name, all trainable variables are counted."""
    # Force graph mode
    with tf.compat.v1.Graph().as_default():
      _ = tf.compat.v1.get_variable(
          name='w', shape=[10, 2], dtype=tf.float32, trainable=True)
      _ = tf.compat.v1.get_variable(
          name='b', shape=[2], dtype=tf.float32, trainable=True)
      metric_fn = metric_fns.create_num_parameters_metric_fn(None)
      metrics_dict = metric_fn(None, None)
      with self.test_session() as sess:
        # 10*2 + 2 = 22: both variables contribute.
        self.assertEqual(22, sess.run(metrics_dict['num_parameters'][1]))

  def test_create_num_parameters_metric_fn_with_tower(self):
    """With a tower name, only variables under that tower are counted."""
    # Force graph mode
    with tf.compat.v1.Graph().as_default():
      _ = tf.compat.v1.get_variable(
          name='Phoenix/name', shape=[10, 2], dtype=tf.float32, trainable=True)
      _ = tf.compat.v1.get_variable(
          name='b', shape=[2], dtype=tf.float32, trainable=True)
      metric_fn = metric_fns.create_num_parameters_metric_fn('name')
      metrics_dict = metric_fn(None, None)
      with self.test_session() as sess:
        # Only 'Phoenix/name' (10*2 = 20) is counted; 'b' lies outside
        # the tower scope.
        self.assertEqual(20, sess.run(metrics_dict['num_parameters'][1]))

  def test_combine_metric_fns(self):
    """combine_metric_fns merges the dicts of the individual metric fns."""
    # Force graph mode
    with tf.compat.v1.Graph().as_default():

      def metric_fn_1(labels, predictions, weights=None):
        # Ignores its inputs; always reports foo1 == 1.
        del labels
        del predictions
        del weights
        one = tf.constant(1, dtype=tf.int32)
        return {'foo1': (one, one)}

      def metric_fn_2(labels, predictions, weights=None):
        # Ignores its inputs; always reports foo2 == 2.
        del labels
        del predictions
        del weights
        two = tf.constant(2, dtype=tf.int32)
        return {'foo2': (two, two)}

      metric_fn_combined = metric_fns.combine_metric_fns(
          [metric_fn_1, metric_fn_2])
      metrics_dict = metric_fn_combined(None, None)
      with self.test_session() as sess:
        # Both source dicts appear in the combined result.
        self.assertEqual(1, sess.run(metrics_dict['foo1'][1]))
        self.assertEqual(2, sess.run(metrics_dict['foo2'][1]))
if __name__ == '__main__':
  # Enable TF2 behavior before running the (graph-mode-forcing) tests.
  tf.enable_v2_behavior()
  tf.test.main()
| 36.375494
| 79
| 0.491253
| 2,023
| 18,406
| 4.225902
| 0.102323
| 0.021991
| 0.018599
| 0.027372
| 0.843373
| 0.830506
| 0.825477
| 0.802667
| 0.799041
| 0.784185
| 0
| 0.04495
| 0.3739
| 18,406
| 505
| 80
| 36.447525
| 0.696893
| 0.054819
| 0
| 0.746725
| 0
| 0
| 0.166427
| 0.015433
| 0
| 0
| 0
| 0
| 0.015284
| 1
| 0.017467
| false
| 0
| 0.008734
| 0
| 0.032751
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
00e869f3b50d5da33314420dd1888ad061df424b
| 68,590
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/backup_results_unknownr/cmp_gromacs/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/backup_results_unknownr/cmp_gromacs/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/backup_results_unknownr/cmp_gromacs/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.306958,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.443787,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.76799,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.744131,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 1.28857,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.739029,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 2.77173,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.464485,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 8.91233,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.334012,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0269753,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.3055,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.199499,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.639512,
'Execution Unit/Register Files/Runtime Dynamic': 0.226475,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.822308,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.84225,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 5.68962,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00124349,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00124349,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.0010876,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000423501,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00286582,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00644039,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0117608,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.191784,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.399668,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.651384,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.26104,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0723851,
'L2/Runtime Dynamic': 0.013777,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 6.64646,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.60142,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.175005,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.175005,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 7.47624,
'Load Store Unit/Runtime Dynamic': 3.63949,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.431533,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.863066,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.153152,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.154232,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.065541,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.823265,
'Memory Management Unit/Runtime Dynamic': 0.219773,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 30.8147,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.16529,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0520731,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.365967,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 1.58333,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 12.407,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.153474,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.323234,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.889369,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.319682,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.515635,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.260275,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.09559,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.229269,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.80398,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.168021,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0134089,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.151959,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.099167,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.31998,
'Execution Unit/Register Files/Runtime Dynamic': 0.112576,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.358512,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.808434,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.74521,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000575747,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000575747,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000505076,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000197493,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00142454,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00308111,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00539152,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0953318,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.06392,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.196904,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.32379,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.57673,
'Instruction Fetch Unit/Runtime Dynamic': 0.624498,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0344139,
'L2/Runtime Dynamic': 0.00634291,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.95497,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.30647,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0879289,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0879288,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.37019,
'Load Store Unit/Runtime Dynamic': 1.82803,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.216818,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.433635,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0769493,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0774625,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.377032,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0322897,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.665326,
'Memory Management Unit/Runtime Dynamic': 0.109752,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 23.0401,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.441986,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.019802,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.154726,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.616515,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 5.93035,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.155088,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.324501,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.89669,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.318493,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.513717,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.259307,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.09152,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.226788,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.81282,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.169404,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.013359,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.152259,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0987981,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.321663,
'Execution Unit/Register Files/Runtime Dynamic': 0.112157,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.359548,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.808066,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.74162,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000525531,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000525531,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000460386,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000179671,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00141924,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00293069,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00494412,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0949771,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.04136,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.193978,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.322585,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.55307,
'Instruction Fetch Unit/Runtime Dynamic': 0.619415,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0324443,
'L2/Runtime Dynamic': 0.00559454,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.9073,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.28315,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0863866,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0863866,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.31523,
'Load Store Unit/Runtime Dynamic': 1.79557,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.213015,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.426029,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0755996,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0760829,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.375629,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0318111,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.661605,
'Memory Management Unit/Runtime Dynamic': 0.107894,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 22.9646,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.445624,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0197927,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.154067,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.619484,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 5.88957,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.123459,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.299658,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.697425,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.274346,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.44251,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.223364,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.940221,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.206848,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.43897,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.131759,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0115073,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.128185,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0851036,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.259943,
'Execution Unit/Register Files/Runtime Dynamic': 0.0966109,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.300921,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.680616,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.42248,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000743591,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000743591,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00065549,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000258029,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00122252,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00336519,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00684999,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0818123,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.20396,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.174669,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.277871,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.67504,
'Instruction Fetch Unit/Runtime Dynamic': 0.544568,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0365819,
'L2/Runtime Dynamic': 0.00712371,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.36629,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.02666,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0688837,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0688837,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.69157,
'Load Store Unit/Runtime Dynamic': 1.43526,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.169855,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.339711,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0602822,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0608277,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.323563,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0286458,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.583226,
'Memory Management Unit/Runtime Dynamic': 0.0894735,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 21.0149,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.346596,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0165957,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.133413,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.496604,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.99551,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 0.4907122877281571,
'Runtime Dynamic': 0.4907122877281571,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.0912997,
'Runtime Dynamic': 0.0508145,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 97.9256,
'Peak Power': 131.038,
'Runtime Dynamic': 29.2733,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 97.8343,
'Total Cores/Runtime Dynamic': 29.2225,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.0912997,
'Total L3s/Runtime Dynamic': 0.0508145,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.043764
| 124
| 0.681994
| 8,082
| 68,590
| 5.781985
| 0.067805
| 0.123604
| 0.11299
| 0.093473
| 0.939268
| 0.931243
| 0.918596
| 0.886518
| 0.861074
| 0.842628
| 0
| 0.131638
| 0.224391
| 68,590
| 914
| 125
| 75.043764
| 0.746762
| 0
| 0
| 0.642232
| 0
| 0
| 0.657594
| 0.048111
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
00f130bf5707d83239c652881424a9cfa340abcd
| 8,855
|
py
|
Python
|
parser/team24/classesQuerys.py
|
Mruiz-99/tytus
|
74a17fcbbe80c89eadf9111c5ecc04d82959c85b
|
[
"MIT"
] | null | null | null |
parser/team24/classesQuerys.py
|
Mruiz-99/tytus
|
74a17fcbbe80c89eadf9111c5ecc04d82959c85b
|
[
"MIT"
] | null | null | null |
parser/team24/classesQuerys.py
|
Mruiz-99/tytus
|
74a17fcbbe80c89eadf9111c5ecc04d82959c85b
|
[
"MIT"
] | null | null | null |
#
from mathtrig import *
#
class query():
    """Abstract base class for every SQL query AST node produced by the parser."""
class select(query):
    """AST node for a SELECT statement.

    Collects every clause produced by the parser: DISTINCT flag, select list,
    FROM tables, WHERE conditions, GROUP BY flag, HAVING, ORDER BY with a
    direction flag (defaults to 'F' — presumably "no explicit ASC/DESC";
    TODO confirm against the grammar), LIMIT and OFFSET.
    """
    def __init__(self, distinct=False, select_list=None, table_expression=None,
                 condition=None, group=False, having=None, orderby=None,
                 orderAscDesc='F', limit=0, offset=0):
        # Fix: the original used mutable default arguments ([]), so every
        # instance created without explicit clauses shared the same list
        # objects and mutations leaked between query instances. None
        # sentinels give each instance a fresh list while keeping the call
        # signature backward compatible for callers that pass lists.
        self.distinct = distinct
        self.select_list = [] if select_list is None else select_list
        self.table_expression = [] if table_expression is None else table_expression
        self.condition = [] if condition is None else condition
        self.group = group
        self.having = [] if having is None else having
        self.orderby = [] if orderby is None else orderby
        # Attribute is lowercased relative to the parameter name; kept as-is
        # because other modules may read .orderascdesc.
        self.orderascdesc = orderAscDesc
        self.limit = limit
        self.offset = offset
class exp_query():
    """Abstract base class for expression AST nodes."""
class exp_id(exp_query):
    """Identifier expression: holds the value name and the table it belongs to,
    to be resolved against the database array at evaluation time."""
    def __init__(self, val, table):
        self.val, self.table = val, table
class exp_bool(exp_query):
    """Boolean literal expression."""
    def __init__(self, val):
        self.val = val
class exp_text(exp_query):
    """Text (string) literal expression."""
    def __init__(self, val):
        self.val = val
class exp_int(exp_query):
    """Integer literal expression."""
    def __init__(self, val):
        self.val = val
class exp_dec(exp_query):
    """Decimal (float) literal expression."""
    def __init__(self, val):
        self.val = val
class exp_suma(exp_query):
    """Addition: exp1 + exp2."""
    def __init__(self, exp1, exp2):
        self.exp1, self.exp2 = exp1, exp2
class exp_resta(exp_query):
    """Subtraction: exp1 - exp2.

    (Fix: the original docstring was copy-pasted from exp_suma and wrongly
    said it added the two expressions.)
    """
    def __init__(self, exp1, exp2):
        self.exp1 = exp1
        self.exp2 = exp2
class exp_multiplicacion(exp_query):
    """Multiplication: exp1 * exp2.

    (Fix: the original docstring was copy-pasted from exp_suma and wrongly
    said it added the two expressions.)
    """
    def __init__(self, exp1, exp2):
        self.exp1 = exp1
        self.exp2 = exp2
class exp_division(exp_query):
    """Division: exp1 / exp2.

    (Fix: the original docstring was copy-pasted from exp_suma and wrongly
    said it added the two expressions.)
    """
    def __init__(self, exp1, exp2):
        self.exp1 = exp1
        self.exp2 = exp2
class select_column():
    """Abstract base class for entries in a SELECT list."""
class column_id(select_column):
    """Plain column reference in a select list: identifier, owning table, alias."""
    def __init__(self, id, table, alias):
        # `id` shadows the builtin, but the parameter name is part of the
        # public signature, so it is kept.
        self.id = id
        self.table = table
        self.alias = alias
class column_mathtrig(select_column):
    """Abstract base class for math/trigonometric select-list functions."""
class math_abs(column_mathtrig):
    """Select-list node for ABS(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class math_cbrt(column_mathtrig):
    """Select-list node for CBRT(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class math_ceil(column_mathtrig):
    """Select-list node for CEIL(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class math_degrees(column_mathtrig):
    """Select-list node for DEGREES(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class math_div(column_mathtrig):
    """Select-list node for DIV(exp1, exp2) AS alias."""
    def __init__(self, exp1, exp2, alias):
        self.exp1, self.exp2 = exp1, exp2
        self.alias = alias
class math_factorial(column_mathtrig):
    """Select-list node for FACTORIAL(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class math_floor(column_mathtrig):
    """Select-list node for FLOOR(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class math_gcd(column_mathtrig):
    """Select-list node for GCD(exp1, exp2) AS alias."""
    def __init__(self, exp1, exp2, alias):
        self.exp1, self.exp2 = exp1, exp2
        self.alias = alias
class math_ln(column_mathtrig):
    """Select-list node for LN(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class math_log(column_mathtrig):
    """Select-list node for LOG(exp1, exp2) AS alias."""
    def __init__(self, exp1, exp2, alias):
        self.exp1, self.exp2 = exp1, exp2
        self.alias = alias
class math_log10(column_mathtrig):
    """Select-list node for LOG10(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class math_mod(column_mathtrig):
    """Select-list node for MOD(...) AS alias."""
    def __init__(self, exp, alias):
        # NOTE(review): MOD normally takes two arguments, but the parser
        # supplies a single exp here — confirm against the grammar rules.
        self.exp, self.alias = exp, alias
class math_pi(column_mathtrig):
    """Select-list node for PI() AS alias; the value is computed eagerly."""
    def __init__(self, alias):
        # pi() comes from mathtrig (star-imported at the top of the file).
        self.val = pi()
        self.alias = alias
class math_power(column_mathtrig):
    """Select-list node for POWER(exp1, exp2) AS alias."""
    def __init__(self, exp1, exp2, alias):
        self.exp1, self.exp2 = exp1, exp2
        self.alias = alias
class math_radians(column_mathtrig):
    """Select-list node for RADIANS(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class math_round(column_mathtrig):
    """Select-list node for ROUND(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class math_sign(column_mathtrig):
    """Select-list node for SIGN(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class math_sqrt(column_mathtrig):
    """Select-list node for SQRT(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class math_widthBucket(column_mathtrig):
    """Select-list node for WIDTH_BUCKET(exp1, exp2, exp3, exp4) AS alias."""
    def __init__(self, exp1, exp2, exp3, exp4, alias):
        self.exp1, self.exp2 = exp1, exp2
        self.exp3, self.exp4 = exp3, exp4
        self.alias = alias
class math_trunc(column_mathtrig):
    """Select-list node for TRUNC(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class math_random(column_mathtrig):
    """Select-list node for RANDOM() AS alias (no argument expression)."""
    def __init__(self, alias):
        self.alias = alias
class trig_acos(column_mathtrig):
    """Select-list node for ACOS(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_acosd(column_mathtrig):
    """Select-list node for ACOSD(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_asin(column_mathtrig):
    """Select-list node for ASIN(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_asind(column_mathtrig):
    """Select-list node for ASIND(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_atan(column_mathtrig):
    """Select-list node for ATAN(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_atand(column_mathtrig):
    """Select-list node for ATAND(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_atan2(column_mathtrig):
    """Select-list node for ATAN2(exp1, exp2) AS alias."""
    def __init__(self, exp1, exp2, alias):
        self.exp1, self.exp2 = exp1, exp2
        self.alias = alias
class trig_atan2d(column_mathtrig):
    """Select-list node for ATAN2D(exp1, exp2) AS alias."""
    def __init__(self, exp1, exp2, alias):
        # Fix: the original stored exp1 only in self.exp (a copy-paste slip
        # from the one-argument classes), while the sibling trig_atan2 uses
        # self.exp1/self.exp2. self.exp is kept as well so any existing
        # consumer reading the old attribute still works.
        self.exp = exp1
        self.exp1 = exp1
        self.exp2 = exp2
        self.alias = alias
class trig_cos(column_mathtrig):
    """Select-list node for COS(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_cosd(column_mathtrig):
    """Select-list node for COSD(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_cot(column_mathtrig):
    """Select-list node for COT(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_cotd(column_mathtrig):
    """Select-list node for COTD(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_sin(column_mathtrig):
    """Select-list node for SIN(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_sind(column_mathtrig):
    """Select-list node for SIND(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_tan(column_mathtrig):
    """Select-list node for TAN(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_tand(column_mathtrig):
    """Select-list node for TAND(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_sinh(column_mathtrig):
    """Select-list node for SINH(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_cosh(column_mathtrig):
    """Select-list node for COSH(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_tanh(column_mathtrig):
    """Select-list node for TANH(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_asinh(column_mathtrig):
    """Select-list node for ASINH(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_acosh(column_mathtrig):
    """Select-list node for ACOSH(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class trig_atanh(column_mathtrig):
    """Select-list node for ATANH(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class column_function(select_column):
    """Abstract base class for string/utility select-list functions."""
class fun_length(column_function):
    """Select-list node for LENGTH(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class fun_substring(column_function):
    """Select-list node for SUBSTRING(exp) AS alias."""
    def __init__(self, exp, alias):
        # NOTE(review): SUBSTRING usually also carries start/length bounds;
        # only the expression is stored here — confirm against the grammar.
        self.exp, self.alias = exp, alias
class fun_trim(column_function):
    """Select-list node for TRIM(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class fun_md5(column_function):
    """Select-list node for MD5(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class fun_sha256(column_function):
    """Select-list node for SHA256(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class fun_substr(column_function):
    """Select-list node for SUBSTR(exp) AS alias."""
    def __init__(self, exp, alias):
        self.exp, self.alias = exp, alias
class fun_convert(column_function):
    """Select-list node for CONVERT(exp AS type) AS alias."""
    def __init__(self, exp, type, alias):
        # `type` shadows the builtin, but the parameter name is part of the
        # public signature, so it is kept.
        self.exp = exp
        self.type = type
        self.alias = alias
class fun_greatest(column_function):
    """Select-list node for GREATEST(exp, ...) AS alias over a list of expressions."""
    def __init__(self, lexps, alias):
        self.lexps, self.alias = lexps, alias
class fun_least(column_function):
    """Select-list node for LEAST(exp, ...) AS alias over a list of expressions."""
    def __init__(self, lexps, alias):
        self.lexps, self.alias = lexps, alias
| 21.083333
| 163
| 0.626539
| 1,144
| 8,855
| 4.51049
| 0.103147
| 0.112597
| 0.134302
| 0.191473
| 0.800775
| 0.749806
| 0.74438
| 0.724806
| 0.718411
| 0.677132
| 0
| 0.012619
| 0.275099
| 8,855
| 420
| 164
| 21.083333
| 0.791245
| 0.032863
| 0
| 0.65625
| 0
| 0
| 0.035803
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.21875
| false
| 0
| 0.003472
| 0
| 0.458333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dae571122250d18334e8836a87dfc233490a38b1
| 47
|
py
|
Python
|
katas/kyu_6/what_is_the_point.py
|
the-zebulan/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 40
|
2016-03-09T12:26:20.000Z
|
2022-03-23T08:44:51.000Z
|
katas/kyu_6/what_is_the_point.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | null | null | null |
katas/kyu_6/what_is_the_point.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 36
|
2016-11-07T19:59:58.000Z
|
2022-03-31T11:18:27.000Z
|
def pointless(*args):
    """Ignore every argument; the answer is always the same."""
    answer = "Rick Astley"
    return answer
| 15.666667
| 24
| 0.680851
| 6
| 47
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191489
| 47
| 2
| 25
| 23.5
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0.234043
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
975ac5854c72dd714664faf801b386d64a60ea45
| 35,292
|
py
|
Python
|
feersum_nlu/api/dashboard_api.py
|
praekelt/feersum-nlu-api-wrappers
|
6580e2bab2c8a764fe868a505330b3fee6029074
|
[
"BSD-3-Clause"
] | 9
|
2017-10-10T12:24:23.000Z
|
2021-08-18T14:07:51.000Z
|
feersum_nlu/api/dashboard_api.py
|
praekelt/feersum-nlu-api-wrappers
|
6580e2bab2c8a764fe868a505330b3fee6029074
|
[
"BSD-3-Clause"
] | 1
|
2020-12-06T11:03:25.000Z
|
2021-04-14T05:21:23.000Z
|
feersum_nlu/api/dashboard_api.py
|
praekelt/feersum-nlu-api-wrappers
|
6580e2bab2c8a764fe868a505330b3fee6029074
|
[
"BSD-3-Clause"
] | 2
|
2019-02-12T08:26:06.000Z
|
2022-02-01T09:39:47.000Z
|
# coding: utf-8
"""
FeersumNLU API
This is the HTTP API for Feersum NLU. See https://github.com/praekelt/feersum-nlu-api-wrappers for examples of how to use the API. # noqa: E501
OpenAPI spec version: 2.0.54.dev2
Contact: nlu@feersum.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from feersum_nlu.api_client import ApiClient
class DashboardApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def dashboard_audio_get_details(self, **kwargs): # noqa: E501
"""Your audio service dashboard. Same as POST endpoint, but doesn't allow params to be supplied to the operation. # noqa: E501
Get your list of model instances, the API version, etc. Same as POST endpoint, but doesn't allow params to be supplied to the operation. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.dashboard_audio_get_details(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str x_caller:
:return: DashboardDetail
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.dashboard_audio_get_details_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.dashboard_audio_get_details_with_http_info(**kwargs) # noqa: E501
return data
    def dashboard_audio_get_details_with_http_info(self, **kwargs):  # noqa: E501
        """Your audio service dashboard. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501
        Get your list of model instances, the API version, etc. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.dashboard_audio_get_details_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str x_caller:
        :return: DashboardDetail
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['x_caller']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() snapshots the locals defined so far; do not introduce
        # new local variables above this line or they leak into `params`.
        params = locals()
        # Reject unknown keyword arguments, then merge accepted ones into the
        # snapshot so they can be looked up uniformly below.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method dashboard_audio_get_details" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        if 'x_caller' in params:
            header_params['X-CALLER'] = params['x_caller']  # noqa: E501
        form_params = []
        local_var_files = {}
        # GET request: no body is sent for this endpoint.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['APIKeyHeader', 'APIKeyHeader_old']  # noqa: E501
        return self.api_client.call_api(
            '/audio/v2/dashboard', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='DashboardDetail',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def dashboard_audio_get_details_with_params(self, params, **kwargs): # noqa: E501
"""Your audio service dashboard. Same as GET endpoint, but allows params to be supplied to the operation. # noqa: E501
Get your list of model instances, the API version, etc. Same as GET endpoint, but allows params to be supplied to the operation. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.dashboard_audio_get_details_with_params(params, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DashboardParams params: Params like 'show_data_objects' that influence the dashboard's response. (required)
:param str x_caller:
:return: DashboardDetail
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.dashboard_audio_get_details_with_params_with_http_info(params, **kwargs) # noqa: E501
else:
(data) = self.dashboard_audio_get_details_with_params_with_http_info(params, **kwargs) # noqa: E501
return data
    def dashboard_audio_get_details_with_params_with_http_info(self, params, **kwargs):  # noqa: E501
        """Your audio service dashboard. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501
        Get your list of model instances, the API version, etc. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.dashboard_audio_get_details_with_params_with_http_info(params, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param DashboardParams params: Params like 'show_data_objects' that influence the dashboard's response. (required)
        :param str x_caller:
        :return: DashboardDetail
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['params', 'x_caller']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() rebinds `params` to the locals snapshot; the original
        # DashboardParams argument survives as params['params'].
        params = locals()
        # Reject unknown keyword arguments, then merge accepted ones into the
        # snapshot so they can be looked up uniformly below.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method dashboard_audio_get_details_with_params" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'params' is set
        if ('params' not in params or
                params['params'] is None):
            raise ValueError("Missing the required parameter `params` when calling `dashboard_audio_get_details_with_params`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        if 'x_caller' in params:
            header_params['X-CALLER'] = params['x_caller']  # noqa: E501
        form_params = []
        local_var_files = {}
        # POST request: the DashboardParams object is sent as the JSON body.
        body_params = None
        if 'params' in params:
            body_params = params['params']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['APIKeyHeader', 'APIKeyHeader_old']  # noqa: E501
        return self.api_client.call_api(
            '/audio/v2/dashboard', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='DashboardDetail',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def dashboard_get_details(self, **kwargs):  # noqa: E501
    """Your root service dashboard. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501

    Get your list of model instances, the API version, etc. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.dashboard_get_details(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str x_caller:
    :return: DashboardDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always strips the HTTP envelope and hands
    # back only the deserialized payload (or the request thread when
    # async_req=True) from the *_with_http_info worker.
    kwargs['_return_http_data_only'] = True
    return self.dashboard_get_details_with_http_info(**kwargs)  # noqa: E501
def dashboard_get_details_with_http_info(self, **kwargs):  # noqa: E501
    """Your root service dashboard. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501

    Get your list of model instances, the API version, etc. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.dashboard_get_details_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str x_caller:
    :return: DashboardDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs, plus the request-control options shared
    # by every generated method.
    all_params = ['x_caller']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE(generated code): `params` starts as this call's locals()
    # ({'self': ..., 'kwargs': {...}}); validated kwargs are then
    # flattened into it so each option is reachable as params[name].
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method dashboard_get_details" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}
    # Optional caller-identification header.
    if 'x_caller' in params:
        header_params['X-CALLER'] = params['x_caller']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyHeader_old']  # noqa: E501

    return self.api_client.call_api(
        '/dashboard/v2/dashboard', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DashboardDetail',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def dashboard_get_details_with_params(self, params, **kwargs):  # noqa: E501
    """Your root service dashboard. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501

    Get your list of model instances, the API version, etc. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.dashboard_get_details_with_params(params, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DashboardParams params: Params like 'show_data_objects' that influence the dashboard's response. (required)
    :param str x_caller:
    :return: DashboardDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always strips the HTTP envelope and hands
    # back only the deserialized payload (or the request thread when
    # async_req=True) from the *_with_http_info worker.
    kwargs['_return_http_data_only'] = True
    return self.dashboard_get_details_with_params_with_http_info(params, **kwargs)  # noqa: E501
def dashboard_get_details_with_params_with_http_info(self, params, **kwargs):  # noqa: E501
    """Your root service dashboard. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501

    Get your list of model instances, the API version, etc. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.dashboard_get_details_with_params_with_http_info(params, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DashboardParams params: Params like 'show_data_objects' that influence the dashboard's response. (required)
    :param str x_caller:
    :return: DashboardDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs, plus the request-control options shared
    # by every generated method.
    all_params = ['params', 'x_caller']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE(generated code): this rebinds the name `params` from the
    # DashboardParams argument to the locals() dict; the original request
    # body remains reachable as params['params'].
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method dashboard_get_details_with_params" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'params' is set
    if ('params' not in params or
            params['params'] is None):
        raise ValueError("Missing the required parameter `params` when calling `dashboard_get_details_with_params`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}
    # Optional caller-identification header.
    if 'x_caller' in params:
        header_params['X-CALLER'] = params['x_caller']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # The DashboardParams argument becomes the JSON request body.
    if 'params' in params:
        body_params = params['params']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyHeader_old']  # noqa: E501

    return self.api_client.call_api(
        '/dashboard/v2/dashboard', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DashboardDetail',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def dashboard_nlu_get_details(self, **kwargs):  # noqa: E501
    """Your nlu service dashboard. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501

    Get your list of model instances, the API version, etc. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.dashboard_nlu_get_details(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str x_caller:
    :return: DashboardDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always strips the HTTP envelope and hands
    # back only the deserialized payload (or the request thread when
    # async_req=True) from the *_with_http_info worker.
    kwargs['_return_http_data_only'] = True
    return self.dashboard_nlu_get_details_with_http_info(**kwargs)  # noqa: E501
def dashboard_nlu_get_details_with_http_info(self, **kwargs):  # noqa: E501
    """Your nlu service dashboard. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501

    Get your list of model instances, the API version, etc. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.dashboard_nlu_get_details_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str x_caller:
    :return: DashboardDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs, plus the request-control options shared
    # by every generated method.
    all_params = ['x_caller']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE(generated code): `params` starts as this call's locals()
    # ({'self': ..., 'kwargs': {...}}); validated kwargs are then
    # flattened into it so each option is reachable as params[name].
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method dashboard_nlu_get_details" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}
    # Optional caller-identification header.
    if 'x_caller' in params:
        header_params['X-CALLER'] = params['x_caller']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyHeader_old']  # noqa: E501

    return self.api_client.call_api(
        '/nlu/v2/dashboard', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DashboardDetail',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def dashboard_nlu_get_details_with_params(self, params, **kwargs):  # noqa: E501
    """Your nlu service dashboard. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501

    Get your list of model instances, the API version, etc. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.dashboard_nlu_get_details_with_params(params, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DashboardParams params: Params like 'show_data_objects' that influence the dashboard's response. (required)
    :param str x_caller:
    :return: DashboardDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always strips the HTTP envelope and hands
    # back only the deserialized payload (or the request thread when
    # async_req=True) from the *_with_http_info worker.
    kwargs['_return_http_data_only'] = True
    return self.dashboard_nlu_get_details_with_params_with_http_info(params, **kwargs)  # noqa: E501
def dashboard_nlu_get_details_with_params_with_http_info(self, params, **kwargs):  # noqa: E501
    """Your nlu service dashboard. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501

    Get your list of model instances, the API version, etc. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.dashboard_nlu_get_details_with_params_with_http_info(params, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DashboardParams params: Params like 'show_data_objects' that influence the dashboard's response. (required)
    :param str x_caller:
    :return: DashboardDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs, plus the request-control options shared
    # by every generated method.
    all_params = ['params', 'x_caller']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE(generated code): this rebinds the name `params` from the
    # DashboardParams argument to the locals() dict; the original request
    # body remains reachable as params['params'].
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method dashboard_nlu_get_details_with_params" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'params' is set
    if ('params' not in params or
            params['params'] is None):
        raise ValueError("Missing the required parameter `params` when calling `dashboard_nlu_get_details_with_params`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}
    # Optional caller-identification header.
    if 'x_caller' in params:
        header_params['X-CALLER'] = params['x_caller']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # The DashboardParams argument becomes the JSON request body.
    if 'params' in params:
        body_params = params['params']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyHeader_old']  # noqa: E501

    return self.api_client.call_api(
        '/nlu/v2/dashboard', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DashboardDetail',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def dashboard_vision_get_details(self, **kwargs):  # noqa: E501
    """Your vision service dashboard. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501

    Get your list of model instances, the API version, etc. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.dashboard_vision_get_details(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str x_caller:
    :return: DashboardDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always strips the HTTP envelope and hands
    # back only the deserialized payload (or the request thread when
    # async_req=True) from the *_with_http_info worker.
    kwargs['_return_http_data_only'] = True
    return self.dashboard_vision_get_details_with_http_info(**kwargs)  # noqa: E501
def dashboard_vision_get_details_with_http_info(self, **kwargs):  # noqa: E501
    """Your vision service dashboard. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501

    Get your list of model instances, the API version, etc. Same as POST endpoint, but doesn't allow params to be supplied to the operation.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.dashboard_vision_get_details_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str x_caller:
    :return: DashboardDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs, plus the request-control options shared
    # by every generated method.
    all_params = ['x_caller']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE(generated code): `params` starts as this call's locals()
    # ({'self': ..., 'kwargs': {...}}); validated kwargs are then
    # flattened into it so each option is reachable as params[name].
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method dashboard_vision_get_details" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}
    # Optional caller-identification header.
    if 'x_caller' in params:
        header_params['X-CALLER'] = params['x_caller']  # noqa: E501

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyHeader_old']  # noqa: E501

    return self.api_client.call_api(
        '/vision/v2/dashboard', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DashboardDetail',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def dashboard_vision_get_details_with_params(self, params, **kwargs):  # noqa: E501
    """Your vision service dashboard. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501

    Get your list of model instances, the API version, etc. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.dashboard_vision_get_details_with_params(params, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DashboardParams params: Params like 'show_data_objects' that influence the dashboard's response. (required)
    :param str x_caller:
    :return: DashboardDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always strips the HTTP envelope and hands
    # back only the deserialized payload (or the request thread when
    # async_req=True) from the *_with_http_info worker.
    kwargs['_return_http_data_only'] = True
    return self.dashboard_vision_get_details_with_params_with_http_info(params, **kwargs)  # noqa: E501
def dashboard_vision_get_details_with_params_with_http_info(self, params, **kwargs):  # noqa: E501
    """Your vision service dashboard. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501

    Get your list of model instances, the API version, etc. Same as GET endpoint, but allows params to be supplied to the operation.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.dashboard_vision_get_details_with_params_with_http_info(params, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DashboardParams params: Params like 'show_data_objects' that influence the dashboard's response. (required)
    :param str x_caller:
    :return: DashboardDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint-specific kwargs, plus the request-control options shared
    # by every generated method.
    all_params = ['params', 'x_caller']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE(generated code): this rebinds the name `params` from the
    # DashboardParams argument to the locals() dict; the original request
    # body remains reachable as params['params'].
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method dashboard_vision_get_details_with_params" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'params' is set
    if ('params' not in params or
            params['params'] is None):
        raise ValueError("Missing the required parameter `params` when calling `dashboard_vision_get_details_with_params`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}
    # Optional caller-identification header.
    if 'x_caller' in params:
        header_params['X-CALLER'] = params['x_caller']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # The DashboardParams argument becomes the JSON request body.
    if 'params' in params:
        body_params = params['params']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyHeader_old']  # noqa: E501

    return self.api_client.call_api(
        '/vision/v2/dashboard', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DashboardDetail',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 42.674728
| 158
| 0.631163
| 4,227
| 35,292
| 5.025313
| 0.043766
| 0.047077
| 0.031635
| 0.027116
| 0.975803
| 0.975002
| 0.974343
| 0.970059
| 0.970059
| 0.970059
| 0
| 0.015594
| 0.284087
| 35,292
| 826
| 159
| 42.726392
| 0.825141
| 0.384478
| 0
| 0.842105
| 0
| 0
| 0.18397
| 0.049082
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038902
| false
| 0
| 0.009153
| 0
| 0.105263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
97c1eddc0bff53038d000d479886ce72498671cc
| 6,699
|
py
|
Python
|
deep-rl/lib/python2.7/site-packages/OpenGL/raw/GL/SUN/vertex.py
|
ShujaKhalid/deep-rl
|
99c6ba6c3095d1bfdab81bd01395ced96bddd611
|
[
"MIT"
] | 210
|
2016-04-09T14:26:00.000Z
|
2022-03-25T18:36:19.000Z
|
deep-rl/lib/python2.7/site-packages/OpenGL/raw/GL/SUN/vertex.py
|
ShujaKhalid/deep-rl
|
99c6ba6c3095d1bfdab81bd01395ced96bddd611
|
[
"MIT"
] | 72
|
2016-09-04T09:30:19.000Z
|
2022-03-27T17:06:53.000Z
|
deep-rl/lib/python2.7/site-packages/OpenGL/raw/GL/SUN/vertex.py
|
ShujaKhalid/deep-rl
|
99c6ba6c3095d1bfdab81bd01395ced96bddd611
|
[
"MIT"
] | 64
|
2016-04-09T14:26:49.000Z
|
2022-03-21T11:19:47.000Z
|
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
# Canonical OpenGL extension name for the bindings defined below.
_EXTENSION_NAME = 'GL_SUN_vertex'

def _f( function ):
    # Wrap a Python stub as a GL_SUN_vertex entry point: _p.createFunction
    # binds it to the platform GL library and attaches the standard
    # PyOpenGL error checker. The stub itself only supplies name/signature.
    return _p.createFunction( function,_p.PLATFORM.GL,'GL_SUN_vertex',error_checker=_errors._error_checker)
# Autogenerated GL_SUN_vertex entry points. Each stub follows the same
# pattern: @_p.types(return_type, *arg_types) records the C signature,
# and @_f replaces the empty Python body with a dispatcher into the
# native GL driver (see _f above). The `pass` bodies are placeholders —
# the real work happens in the wrapped platform function.
# Naming convention (from the GL spec): suffixes encode the per-vertex
# data bundled into one call (Color3f/4f/4ub, Normal3f, TexCoord2f/4f,
# ReplacementCodeui, Vertex2f/3f/4f); a trailing `v` means the variant
# takes arrays instead of scalars.
@_f
@_p.types(None,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glColor3fVertex3fSUN(r,g,b,x,y,z):pass
@_f
@_p.types(None,arrays.GLfloatArray,arrays.GLfloatArray)
def glColor3fVertex3fvSUN(c,v):pass
@_f
@_p.types(None,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glColor4fNormal3fVertex3fSUN(r,g,b,a,nx,ny,nz,x,y,z):pass
@_f
@_p.types(None,arrays.GLfloatArray,arrays.GLfloatArray,arrays.GLfloatArray)
def glColor4fNormal3fVertex3fvSUN(c,n,v):pass
@_f
@_p.types(None,_cs.GLubyte,_cs.GLubyte,_cs.GLubyte,_cs.GLubyte,_cs.GLfloat,_cs.GLfloat)
def glColor4ubVertex2fSUN(r,g,b,a,x,y):pass
@_f
@_p.types(None,arrays.GLubyteArray,arrays.GLfloatArray)
def glColor4ubVertex2fvSUN(c,v):pass
@_f
@_p.types(None,_cs.GLubyte,_cs.GLubyte,_cs.GLubyte,_cs.GLubyte,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glColor4ubVertex3fSUN(r,g,b,a,x,y,z):pass
@_f
@_p.types(None,arrays.GLubyteArray,arrays.GLfloatArray)
def glColor4ubVertex3fvSUN(c,v):pass
@_f
@_p.types(None,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glNormal3fVertex3fSUN(nx,ny,nz,x,y,z):pass
@_f
@_p.types(None,arrays.GLfloatArray,arrays.GLfloatArray)
def glNormal3fVertex3fvSUN(n,v):pass
# Variants taking a leading replacement code (rc) for triangle meshes.
@_f
@_p.types(None,_cs.GLuint,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glReplacementCodeuiColor3fVertex3fSUN(rc,r,g,b,x,y,z):pass
@_f
@_p.types(None,arrays.GLuintArray,arrays.GLfloatArray,arrays.GLfloatArray)
def glReplacementCodeuiColor3fVertex3fvSUN(rc,c,v):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glReplacementCodeuiColor4fNormal3fVertex3fSUN(rc,r,g,b,a,nx,ny,nz,x,y,z):pass
@_f
@_p.types(None,arrays.GLuintArray,arrays.GLfloatArray,arrays.GLfloatArray,arrays.GLfloatArray)
def glReplacementCodeuiColor4fNormal3fVertex3fvSUN(rc,c,n,v):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLubyte,_cs.GLubyte,_cs.GLubyte,_cs.GLubyte,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glReplacementCodeuiColor4ubVertex3fSUN(rc,r,g,b,a,x,y,z):pass
@_f
@_p.types(None,arrays.GLuintArray,arrays.GLubyteArray,arrays.GLfloatArray)
def glReplacementCodeuiColor4ubVertex3fvSUN(rc,c,v):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glReplacementCodeuiNormal3fVertex3fSUN(rc,nx,ny,nz,x,y,z):pass
@_f
@_p.types(None,arrays.GLuintArray,arrays.GLfloatArray,arrays.GLfloatArray)
def glReplacementCodeuiNormal3fVertex3fvSUN(rc,n,v):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glReplacementCodeuiTexCoord2fColor4fNormal3fVertex3fSUN(rc,s,t,r,g,b,a,nx,ny,nz,x,y,z):pass
@_f
@_p.types(None,arrays.GLuintArray,arrays.GLfloatArray,arrays.GLfloatArray,arrays.GLfloatArray,arrays.GLfloatArray)
def glReplacementCodeuiTexCoord2fColor4fNormal3fVertex3fvSUN(rc,tc,c,n,v):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glReplacementCodeuiTexCoord2fNormal3fVertex3fSUN(rc,s,t,nx,ny,nz,x,y,z):pass
@_f
@_p.types(None,arrays.GLuintArray,arrays.GLfloatArray,arrays.GLfloatArray,arrays.GLfloatArray)
def glReplacementCodeuiTexCoord2fNormal3fVertex3fvSUN(rc,tc,n,v):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glReplacementCodeuiTexCoord2fVertex3fSUN(rc,s,t,x,y,z):pass
@_f
@_p.types(None,arrays.GLuintArray,arrays.GLfloatArray,arrays.GLfloatArray)
def glReplacementCodeuiTexCoord2fVertex3fvSUN(rc,tc,v):pass
@_f
@_p.types(None,_cs.GLuint,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glReplacementCodeuiVertex3fSUN(rc,x,y,z):pass
@_f
@_p.types(None,arrays.GLuintArray,arrays.GLfloatArray)
def glReplacementCodeuiVertex3fvSUN(rc,v):pass
# Texture-coordinate + attribute bundles.
@_f
@_p.types(None,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glTexCoord2fColor3fVertex3fSUN(s,t,r,g,b,x,y,z):pass
@_f
@_p.types(None,arrays.GLfloatArray,arrays.GLfloatArray,arrays.GLfloatArray)
def glTexCoord2fColor3fVertex3fvSUN(tc,c,v):pass
@_f
@_p.types(None,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glTexCoord2fColor4fNormal3fVertex3fSUN(s,t,r,g,b,a,nx,ny,nz,x,y,z):pass
@_f
@_p.types(None,arrays.GLfloatArray,arrays.GLfloatArray,arrays.GLfloatArray,arrays.GLfloatArray)
def glTexCoord2fColor4fNormal3fVertex3fvSUN(tc,c,n,v):pass
@_f
@_p.types(None,_cs.GLfloat,_cs.GLfloat,_cs.GLubyte,_cs.GLubyte,_cs.GLubyte,_cs.GLubyte,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glTexCoord2fColor4ubVertex3fSUN(s,t,r,g,b,a,x,y,z):pass
@_f
@_p.types(None,arrays.GLfloatArray,arrays.GLubyteArray,arrays.GLfloatArray)
def glTexCoord2fColor4ubVertex3fvSUN(tc,c,v):pass
@_f
@_p.types(None,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glTexCoord2fNormal3fVertex3fSUN(s,t,nx,ny,nz,x,y,z):pass
@_f
@_p.types(None,arrays.GLfloatArray,arrays.GLfloatArray,arrays.GLfloatArray)
def glTexCoord2fNormal3fVertex3fvSUN(tc,n,v):pass
@_f
@_p.types(None,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glTexCoord2fVertex3fSUN(s,t,x,y,z):pass
@_f
@_p.types(None,arrays.GLfloatArray,arrays.GLfloatArray)
def glTexCoord2fVertex3fvSUN(tc,v):pass
@_f
@_p.types(None,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glTexCoord4fColor4fNormal3fVertex4fSUN(s,t,p,q,r,g,b,a,nx,ny,nz,x,y,z,w):pass
@_f
@_p.types(None,arrays.GLfloatArray,arrays.GLfloatArray,arrays.GLfloatArray,arrays.GLfloatArray)
def glTexCoord4fColor4fNormal3fVertex4fvSUN(tc,c,n,v):pass
@_f
@_p.types(None,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat,_cs.GLfloat)
def glTexCoord4fVertex4fSUN(s,t,p,q,x,y,z,w):pass
@_f
@_p.types(None,arrays.GLfloatArray,arrays.GLfloatArray)
def glTexCoord4fVertex4fvSUN(tc,v):pass
| 49.622222
| 195
| 0.81863
| 1,027
| 6,699
| 5.081792
| 0.104187
| 0.24315
| 0.25503
| 0.413872
| 0.680399
| 0.653573
| 0.652041
| 0.651466
| 0.650508
| 0.645334
| 0
| 0.015748
| 0.033139
| 6,699
| 134
| 196
| 49.992537
| 0.790026
| 0.014928
| 0
| 0.488372
| 1
| 0
| 0.003944
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.317829
| false
| 0.310078
| 0.046512
| 0.007752
| 0.372093
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
c13b593f88afef5498ea1e3579641b62dd58956f
| 108
|
py
|
Python
|
cvxgraphalgs/generators/__init__.py
|
hermish/cvx-graph-algorithms
|
733e137a906bd6c2965d5853d2798a8a01db945c
|
[
"MIT"
] | 7
|
2020-05-11T10:01:31.000Z
|
2021-11-16T16:08:29.000Z
|
cvxgraphalgs/generators/__init__.py
|
hermish/graph-algorithms
|
733e137a906bd6c2965d5853d2798a8a01db945c
|
[
"MIT"
] | 1
|
2020-05-12T16:15:33.000Z
|
2020-06-05T16:40:57.000Z
|
cvxgraphalgs/generators/__init__.py
|
hermish/cvx-graph-algorithms
|
733e137a906bd6c2965d5853d2798a8a01db945c
|
[
"MIT"
] | null | null | null |
from cvxgraphalgs.generators.stochastic_block import *
from cvxgraphalgs.generators.planted_models import *
| 36
| 54
| 0.87037
| 12
| 108
| 7.666667
| 0.666667
| 0.347826
| 0.565217
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 108
| 2
| 55
| 54
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c187ba31db1824816212ac1d876aca3e77248c46
| 77,338
|
py
|
Python
|
infoblox_netmri/api/broker/v3_6_0/device_support_bundle_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
infoblox_netmri/api/broker/v3_6_0/device_support_bundle_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
infoblox_netmri/api/broker/v3_6_0/device_support_bundle_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
from ..broker import Broker
class DeviceSupportBundleBroker(Broker):
controller = "device_support_bundles"
def index(self, **kwargs):
"""Lists the available device support bundles. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient.
**Inputs**
| ``api version min:`` 2
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param id: The internal NetMRI identifier of the Device Support Bundle.
:type id: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param id: The internal NetMRI identifier of the Device Support Bundle.
:type id: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` id
:param sort: The data field(s) to use for sorting the output. Default is id. Valid values are id, name, version, author, enabled_ind, system_ind, neighbor_ind, inventory_ind, environmental_ind, cpu_ind, memory_ind, vlan_ind, forwarding_ind, port_ind, config_ind, created_by, updated_by, created_at, updated_at, valid_ind, unit_tests, status, integrated_ind.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each DeviceSupportBundle. Valid values are id, name, version, author, enabled_ind, system_ind, neighbor_ind, inventory_ind, environmental_ind, cpu_ind, memory_ind, vlan_ind, forwarding_ind, port_ind, config_ind, created_by, updated_by, created_at, updated_at, valid_ind, unit_tests, status, integrated_ind. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_support_bundles: An array of the DeviceSupportBundle objects that match the specified input criteria.
:rtype device_support_bundles: Array of DeviceSupportBundle
"""
return self.api_list_request(self._get_method_fullname("index"), kwargs)
def search(self, **kwargs):
"""Lists the available device support bundles matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below.
**Inputs**
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param author: The author of the Device Support Bundle.
:type author: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param author: The author of the Device Support Bundle.
:type author: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param config_ind: A flag indicating if configuration is shown in Device Viewer for devices.
:type config_ind: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param config_ind: A flag indicating if configuration is shown in Device Viewer for devices.
:type config_ind: Array of Boolean
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param cpu_ind: A flag indicating if CPU information is shown in Device Viewer for devices.
:type cpu_ind: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param cpu_ind: A flag indicating if CPU information is shown in Device Viewer for devices.
:type cpu_ind: Array of Boolean
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param created_at: The date and time the Device Support Bundle was created.
:type created_at: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param created_at: The date and time the Device Support Bundle was created.
:type created_at: Array of DateTime
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param created_by: Indicates by whom Device Support Bundle was created.
:type created_by: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param created_by: Indicates by whom Device Support Bundle was created.
:type created_by: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param enabled_ind: A flag indicating if the Device Support Bundle is enabled.
:type enabled_ind: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param enabled_ind: A flag indicating if the Device Support Bundle is enabled.
:type enabled_ind: Array of Boolean
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param environmental_ind: A flag indicating if environmental information is shown in Device Viewer for devices.
:type environmental_ind: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param environmental_ind: A flag indicating if environmental information is shown in Device Viewer for devices.
:type environmental_ind: Array of Boolean
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param forwarding_ind: A flag indicating if forwarding information is shown in Device Viewer for devices.
:type forwarding_ind: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param forwarding_ind: A flag indicating if forwarding information is shown in Device Viewer for devices.
:type forwarding_ind: Array of Boolean
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param id: The internal NetMRI identifier of the Device Support Bundle.
:type id: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param id: The internal NetMRI identifier of the Device Support Bundle.
:type id: Array of Integer
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param integrated_ind: A flag indicating if the Device Support Bundle is integrated.
:type integrated_ind: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param integrated_ind: A flag indicating if the Device Support Bundle is integrated.
:type integrated_ind: Array of Boolean
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param inventory_ind: A flag indicating if inventory information is shown in Device Viewer for devices.
:type inventory_ind: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param inventory_ind: A flag indicating if inventory information is shown in Device Viewer for devices.
:type inventory_ind: Array of Boolean
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param memory_ind: A flag indicating if memory information is shown in Device Viewer for devices.
:type memory_ind: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param memory_ind: A flag indicating if memory information is shown in Device Viewer for devices.
:type memory_ind: Array of Boolean
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param name: The unique name of the Device Support Bundle.
:type name: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param name: The unique name of the Device Support Bundle.
:type name: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param neighbor_ind: A flag indicating if neighbor information is shown in Device Viewer for devices.
:type neighbor_ind: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param neighbor_ind: A flag indicating if neighbor information is shown in Device Viewer for devices.
:type neighbor_ind: Array of Boolean
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param port_ind: A flag indicating if port config is shown in Device Viewer for devices.
:type port_ind: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param port_ind: A flag indicating if port config is shown in Device Viewer for devices.
:type port_ind: Array of Boolean
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param status: The current editing state of the Device Support Bundle.
:type status: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param status: The current editing state of the Device Support Bundle.
:type status: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param system_ind: A flag indicating if system information is shown in Device Viewer for devices.
:type system_ind: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param system_ind: A flag indicating if system information is shown in Device Viewer for devices.
:type system_ind: Array of Boolean
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param unit_tests: The current state of unit testing for the Device Support Bundle.
:type unit_tests: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param unit_tests: The current state of unit testing for the Device Support Bundle.
:type unit_tests: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param updated_at: The date and time the Device Support Bundle was updated.
:type updated_at: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param updated_at: The date and time the Device Support Bundle was updated.
:type updated_at: Array of DateTime
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param updated_by: Indicates by whom the Device Support Bundle was updated.
:type updated_by: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param updated_by: Indicates by whom the Device Support Bundle was updated.
:type updated_by: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param valid_ind: A flag indicating whether the Device Support Bundle is valid.
:type valid_ind: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param valid_ind: A flag indicating whether the Device Support Bundle is valid.
:type valid_ind: Array of Boolean
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param version: The version of the Device Support Bundle.
:type version: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param version: The version of the Device Support Bundle.
:type version: Array of String
| ``api version min:`` 2.3
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param vlan_ind: A flag indicating if VLAN information is shown in Device Viewer for devices.
:type vlan_ind: Boolean
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param vlan_ind: A flag indicating if VLAN information is shown in Device Viewer for devices.
:type vlan_ind: Array of Boolean
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` id
:param sort: The data field(s) to use for sorting the output. Default is id. Valid values are id, name, version, author, enabled_ind, system_ind, neighbor_ind, inventory_ind, environmental_ind, cpu_ind, memory_ind, vlan_ind, forwarding_ind, port_ind, config_ind, created_by, updated_by, created_at, updated_at, valid_ind, unit_tests, status, integrated_ind.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each DeviceSupportBundle. Valid values are id, name, version, author, enabled_ind, system_ind, neighbor_ind, inventory_ind, environmental_ind, cpu_ind, memory_ind, vlan_ind, forwarding_ind, port_ind, config_ind, created_by, updated_by, created_at, updated_at, valid_ind, unit_tests, status, integrated_ind. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param query: This value will be matched against device support bundles, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: author, config_ind, cpu_ind, created_at, created_by, enabled_ind, environmental_ind, forwarding_ind, id, integrated_ind, inventory_ind, memory_ind, name, neighbor_ind, port_ind, status, system_ind, unit_tests, updated_at, updated_by, valid_ind, version, vlan_ind.
:type query: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_support_bundles: An array of the DeviceSupportBundle objects that match the specified input criteria.
:rtype device_support_bundles: Array of DeviceSupportBundle
"""
return self.api_list_request(self._get_method_fullname("search"), kwargs)
def find(self, **kwargs):
"""Lists the available device support bundles matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: author, config_ind, cpu_ind, created_at, created_by, enabled_ind, environmental_ind, forwarding_ind, id, integrated_ind, inventory_ind, memory_ind, name, neighbor_ind, port_ind, status, system_ind, unit_tests, updated_at, updated_by, valid_ind, version, vlan_ind.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_author: The operator to apply to the field author. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. author: The author of the Device Support Bundle. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_author: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_author: If op_author is specified, the field named in this input will be compared to the value in author using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_author must be specified if op_author is specified.
:type val_f_author: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_author: If op_author is specified, this value will be compared to the value in author using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_author must be specified if op_author is specified.
:type val_c_author: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_config_ind: The operator to apply to the field config_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. config_ind: A flag indicating if configuration is shown in Device Viewer for devices. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_config_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_config_ind: If op_config_ind is specified, the field named in this input will be compared to the value in config_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_config_ind must be specified if op_config_ind is specified.
:type val_f_config_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_config_ind: If op_config_ind is specified, this value will be compared to the value in config_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_config_ind must be specified if op_config_ind is specified.
:type val_c_config_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_cpu_ind: The operator to apply to the field cpu_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. cpu_ind: A flag indicating if CPU information is shown in Device Viewer for devices. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_cpu_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_cpu_ind: If op_cpu_ind is specified, the field named in this input will be compared to the value in cpu_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_cpu_ind must be specified if op_cpu_ind is specified.
:type val_f_cpu_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_cpu_ind: If op_cpu_ind is specified, this value will be compared to the value in cpu_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_cpu_ind must be specified if op_cpu_ind is specified.
:type val_c_cpu_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_created_at: The operator to apply to the field created_at. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. created_at: The date and time the Device Support Bundle was created. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_created_at: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_created_at: If op_created_at is specified, the field named in this input will be compared to the value in created_at using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_created_at must be specified if op_created_at is specified.
:type val_f_created_at: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_created_at: If op_created_at is specified, this value will be compared to the value in created_at using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_created_at must be specified if op_created_at is specified.
:type val_c_created_at: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_created_by: The operator to apply to the field created_by. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. created_by: Indicates by whom Device Support Bundle was created. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_created_by: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_created_by: If op_created_by is specified, the field named in this input will be compared to the value in created_by using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_created_by must be specified if op_created_by is specified.
:type val_f_created_by: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_created_by: If op_created_by is specified, this value will be compared to the value in created_by using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_created_by must be specified if op_created_by is specified.
:type val_c_created_by: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_enabled_ind: The operator to apply to the field enabled_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. enabled_ind: A flag indicating if the Device Support Bundle is enabled. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_enabled_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_enabled_ind: If op_enabled_ind is specified, the field named in this input will be compared to the value in enabled_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_enabled_ind must be specified if op_enabled_ind is specified.
:type val_f_enabled_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_enabled_ind: If op_enabled_ind is specified, this value will be compared to the value in enabled_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_enabled_ind must be specified if op_enabled_ind is specified.
:type val_c_enabled_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_environmental_ind: The operator to apply to the field environmental_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. environmental_ind: A flag indicating if environmental information is shown in Device Viewer for devices. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_environmental_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_environmental_ind: If op_environmental_ind is specified, the field named in this input will be compared to the value in environmental_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_environmental_ind must be specified if op_environmental_ind is specified.
:type val_f_environmental_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_environmental_ind: If op_environmental_ind is specified, this value will be compared to the value in environmental_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_environmental_ind must be specified if op_environmental_ind is specified.
:type val_c_environmental_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_forwarding_ind: The operator to apply to the field forwarding_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. forwarding_ind: A flag indicating if forwarding information is shown in Device Viewer for devices. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_forwarding_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_forwarding_ind: If op_forwarding_ind is specified, the field named in this input will be compared to the value in forwarding_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_forwarding_ind must be specified if op_forwarding_ind is specified.
:type val_f_forwarding_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_forwarding_ind: If op_forwarding_ind is specified, this value will be compared to the value in forwarding_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_forwarding_ind must be specified if op_forwarding_ind is specified.
:type val_c_forwarding_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_id: The operator to apply to the field id. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. id: The internal NetMRI identifier of the Device Support Bundle. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_id: If op_id is specified, the field named in this input will be compared to the value in id using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_id must be specified if op_id is specified.
:type val_f_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_id: If op_id is specified, this value will be compared to the value in id using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_id must be specified if op_id is specified.
:type val_c_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_integrated_ind: The operator to apply to the field integrated_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. integrated_ind: A flag indicating if the Device Support Bundle is integrated. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_integrated_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_integrated_ind: If op_integrated_ind is specified, the field named in this input will be compared to the value in integrated_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_integrated_ind must be specified if op_integrated_ind is specified.
:type val_f_integrated_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_integrated_ind: If op_integrated_ind is specified, this value will be compared to the value in integrated_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_integrated_ind must be specified if op_integrated_ind is specified.
:type val_c_integrated_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_inventory_ind: The operator to apply to the field inventory_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. inventory_ind: A flag indicating if inventory information is shown in Device Viewer for devices. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_inventory_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_inventory_ind: If op_inventory_ind is specified, the field named in this input will be compared to the value in inventory_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_inventory_ind must be specified if op_inventory_ind is specified.
:type val_f_inventory_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_inventory_ind: If op_inventory_ind is specified, this value will be compared to the value in inventory_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_inventory_ind must be specified if op_inventory_ind is specified.
:type val_c_inventory_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_memory_ind: The operator to apply to the field memory_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. memory_ind: A flag indicating if memory information is shown in Device Viewer for devices. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_memory_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_memory_ind: If op_memory_ind is specified, the field named in this input will be compared to the value in memory_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_memory_ind must be specified if op_memory_ind is specified.
:type val_f_memory_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_memory_ind: If op_memory_ind is specified, this value will be compared to the value in memory_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_memory_ind must be specified if op_memory_ind is specified.
:type val_c_memory_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_name: The operator to apply to the field name. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. name: The unique name of the Device Support Bundle. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_name: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_name: If op_name is specified, the field named in this input will be compared to the value in name using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_name must be specified if op_name is specified.
:type val_f_name: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_name: If op_name is specified, this value will be compared to the value in name using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_name must be specified if op_name is specified.
:type val_c_name: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_neighbor_ind: The operator to apply to the field neighbor_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. neighbor_ind: A flag indicating if neighbor information is shown in Device Viewer for devices. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_neighbor_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_neighbor_ind: If op_neighbor_ind is specified, the field named in this input will be compared to the value in neighbor_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_neighbor_ind must be specified if op_neighbor_ind is specified.
:type val_f_neighbor_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_neighbor_ind: If op_neighbor_ind is specified, this value will be compared to the value in neighbor_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_neighbor_ind must be specified if op_neighbor_ind is specified.
:type val_c_neighbor_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_port_ind: The operator to apply to the field port_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. port_ind: A flag indicating if port config is shown in Device Viewer for devices. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_port_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_port_ind: If op_port_ind is specified, the field named in this input will be compared to the value in port_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_port_ind must be specified if op_port_ind is specified.
:type val_f_port_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_port_ind: If op_port_ind is specified, this value will be compared to the value in port_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_port_ind must be specified if op_port_ind is specified.
:type val_c_port_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_status: The operator to apply to the field status. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. status: The current editing state of the Device Support Bundle. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_status: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_status: If op_status is specified, the field named in this input will be compared to the value in status using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_status must be specified if op_status is specified.
:type val_f_status: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_status: If op_status is specified, this value will be compared to the value in status using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_status must be specified if op_status is specified.
:type val_c_status: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_system_ind: The operator to apply to the field system_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. system_ind: A flag indicating if system information is shown in Device Viewer for devices. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_system_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_system_ind: If op_system_ind is specified, the field named in this input will be compared to the value in system_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_system_ind must be specified if op_system_ind is specified.
:type val_f_system_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_system_ind: If op_system_ind is specified, this value will be compared to the value in system_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_system_ind must be specified if op_system_ind is specified.
:type val_c_system_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_unit_tests: The operator to apply to the field unit_tests. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. unit_tests: The current state of unit testing for the Device Support Bundle. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_unit_tests: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_unit_tests: If op_unit_tests is specified, the field named in this input will be compared to the value in unit_tests using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_unit_tests must be specified if op_unit_tests is specified.
:type val_f_unit_tests: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_unit_tests: If op_unit_tests is specified, this value will be compared to the value in unit_tests using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_unit_tests must be specified if op_unit_tests is specified.
:type val_c_unit_tests: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_updated_at: The operator to apply to the field updated_at. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. updated_at: The date and time the Device Support Bundle was updated. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_updated_at: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_updated_at: If op_updated_at is specified, the field named in this input will be compared to the value in updated_at using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_updated_at must be specified if op_updated_at is specified.
:type val_f_updated_at: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_updated_at: If op_updated_at is specified, this value will be compared to the value in updated_at using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_updated_at must be specified if op_updated_at is specified.
:type val_c_updated_at: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_updated_by: The operator to apply to the field updated_by. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. updated_by: Indicates by whom the Device Support Bundle was updated. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_updated_by: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_updated_by: If op_updated_by is specified, the field named in this input will be compared to the value in updated_by using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_updated_by must be specified if op_updated_by is specified.
:type val_f_updated_by: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_updated_by: If op_updated_by is specified, this value will be compared to the value in updated_by using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_updated_by must be specified if op_updated_by is specified.
:type val_c_updated_by: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_valid_ind: The operator to apply to the field valid_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. valid_ind: A flag indicating whether the Device Support Bundle is valid. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_valid_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_valid_ind: If op_valid_ind is specified, the field named in this input will be compared to the value in valid_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_valid_ind must be specified if op_valid_ind is specified.
:type val_f_valid_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_valid_ind: If op_valid_ind is specified, this value will be compared to the value in valid_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_valid_ind must be specified if op_valid_ind is specified.
:type val_c_valid_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_version: The operator to apply to the field version. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. version: The version of the Device Support Bundle. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_version: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_version: If op_version is specified, the field named in this input will be compared to the value in version using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_version must be specified if op_version is specified.
:type val_f_version: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_version: If op_version is specified, this value will be compared to the value in version using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_version must be specified if op_version is specified.
:type val_c_version: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_vlan_ind: The operator to apply to the field vlan_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. vlan_ind: A flag indicating if VLAN information is shown in Device Viewer for devices. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_vlan_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_vlan_ind: If op_vlan_ind is specified, the field named in this input will be compared to the value in vlan_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_vlan_ind must be specified if op_vlan_ind is specified.
:type val_f_vlan_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_vlan_ind: If op_vlan_ind is specified, this value will be compared to the value in vlan_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_vlan_ind must be specified if op_vlan_ind is specified.
:type val_c_vlan_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` id
:param sort: The data field(s) to use for sorting the output. Default is id. Valid values are id, name, version, author, enabled_ind, system_ind, neighbor_ind, inventory_ind, environmental_ind, cpu_ind, memory_ind, vlan_ind, forwarding_ind, port_ind, config_ind, created_by, updated_by, created_at, updated_at, valid_ind, unit_tests, status, integrated_ind.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each DeviceSupportBundle. Valid values are id, name, version, author, enabled_ind, system_ind, neighbor_ind, inventory_ind, environmental_ind, cpu_ind, memory_ind, vlan_ind, forwarding_ind, port_ind, config_ind, created_by, updated_by, created_at, updated_at, valid_ind, unit_tests, status, integrated_ind. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return device_support_bundles: An array of the DeviceSupportBundle objects that match the specified input criteria.
:rtype device_support_bundles: Array of DeviceSupportBundle
"""
return self.api_list_request(self._get_method_fullname("find"), kwargs)
def destroy(self, **kwargs):
    """Deletes the specified device support bundle from NetMRI.

    :param id: The internal NetMRI identifier of the Device Support Bundle (required).
    :type id: Integer
    :return: The API response for the ``destroy`` method.
    """
    method_name = self._get_method_fullname("destroy")
    return self.api_request(method_name, kwargs)
def delete(self, **kwargs):
    """Delete a device support bundle specified by name.

    :param dsb_name: Unique device support bundle name (optional).
    :type dsb_name: String
    :param id: The id of the output file (optional).
    :type id: String
    :param read: The number of bytes to read from the delete output (optional).
    :type read: Integer
    :return: The API response for the ``delete`` method.
    """
    method_name = self._get_method_fullname("delete")
    return self.api_request(method_name, kwargs)
def export(self, **kwargs):
    """Export specified device support bundle in tgz format.

    :param dsb_name: Unique Device Support Bundle name indicating the bundle to export (required).
    :type dsb_name: String
    :return: The API response for the ``export`` method.
    """
    method_name = self._get_method_fullname("export")
    return self.api_request(method_name, kwargs)
def import_data(self, **kwargs):
    """Import Device Support Bundles in bulk via a xml, tgz, tar, or zip file.

    Named ``import_data`` locally because ``import`` is a Python keyword;
    the remote API method is still ``import``.

    :param file: Device Support Bundle file contents to be imported (optional).
    :type file: String
    :param id: The id of the output file (optional).
    :type id: String
    :param read: The number of bytes to read from the import output (optional).
    :type read: Integer
    :return: The API response for the ``import`` method.
    """
    method_name = self._get_method_fullname("import")
    return self.api_request(method_name, kwargs)
def discard(self, **kwargs):
    """Discard all changes to the modified Device Support Bundle.

    :param dsb_name: Name of the DSB for which changes should be discarded (required).
    :type dsb_name: String
    :return: The API response for the ``discard`` method.
    """
    method_name = self._get_method_fullname("discard")
    return self.api_request(method_name, kwargs)
def generate_templates(self, **kwargs):
    """Return DSB file templates.

    :param dsb_name: The unique name of the new DSB; it will be inserted into
        template files where necessary (required).
    :type dsb_name: String
    :param type: The type of the DSB template (optional).
    :type type: String
    :return: The API response for the ``generate_templates`` method.
    """
    method_name = self._get_method_fullname("generate_templates")
    return self.api_request(method_name, kwargs)
def show(self, **kwargs):
    """Return all existing files for a DSB.

    :param dsb_name: DSB name (required).
    :type dsb_name: String
    :return: The API response for the ``show`` method.
    """
    method_name = self._get_method_fullname("show")
    return self.api_request(method_name, kwargs)
def validate(self, **kwargs):
    """Validate DSB files.

    NOTE(review): the upstream generated docs describe ``ccs_scripts`` as
    "Perl scripts content" and ``perl_scripts`` as "CCS scripts content";
    the descriptions below assume those were swapped — confirm against the
    NetMRI API reference.

    :param dsb_name: DSB name (optional).
    :type dsb_name: String
    :param description: DSB config description content (optional).
    :type description: String
    :param ccs_scripts: DSB CCS scripts content (optional).
    :type ccs_scripts: String
    :param perl_scripts: DSB Perl scripts content (optional).
    :type perl_scripts: String
    :param id: The id of the output file (optional).
    :type id: String
    :param read: The number of bytes already read from the output (optional).
    :type read: Integer
    :return: The API response for the ``validate`` method.
    """
    method_name = self._get_method_fullname("validate")
    return self.api_request(method_name, kwargs)
def save(self, **kwargs):
    """Save DSB scripts to working directory.

    NOTE(review): the upstream generated docs describe ``ccs_scripts`` as
    "Perl scripts content" and ``perl_scripts`` as "CCS scripts content";
    the descriptions below assume those were swapped — confirm against the
    NetMRI API reference.

    :param dsb_name: DSB name (required).
    :type dsb_name: String
    :param description: DSB config description content (required).
    :type description: String
    :param ccs_scripts: DSB CCS scripts content (required).
    :type ccs_scripts: String
    :param perl_scripts: DSB Perl scripts content (required).
    :type perl_scripts: String
    :return: The API response for the ``save`` method.
    """
    method_name = self._get_method_fullname("save")
    return self.api_request(method_name, kwargs)
def install(self, **kwargs):
    """Install a saved, validated DSB script.

    :param dsb_name: DSB name (optional).
    :type dsb_name: String
    :param id: The id of the output file (optional).
    :type id: String
    :param read: The number of bytes already read from the output (optional).
    :type read: Integer
    :return: The API response for the ``install`` method.
    """
    method_name = self._get_method_fullname("install")
    return self.api_request(method_name, kwargs)
def test_bundle(self, **kwargs):
"""Test DSB in real-time
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param dsb_name: Unique device support bundle name
:type dsb_name: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param device_ip: Device IP to test the DSB against
:type device_ip: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param id: The id of the output file
:type id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param read: The number of bytes already read from the test output
:type read: Integer
**Outputs**
"""
return self.api_request(self._get_method_fullname("test_bundle"), kwargs)
def validate_bundle(self, **kwargs):
    """Validate DSB.

    :param dsb_name: Unique device support bundle name (optional).
    :type dsb_name: String
    :param id: The id of the output file (optional).
    :type id: String
    :param read: The number of bytes already read from the validation output (optional).
    :type read: Integer
    :return: The API response for the ``validate_bundle`` method.
    """
    method_name = self._get_method_fullname("validate_bundle")
    return self.api_request(method_name, kwargs)
| 47.680641
| 683
| 0.589503
| 9,909
| 77,338
| 4.494904
| 0.032496
| 0.07903
| 0.05137
| 0.08352
| 0.944432
| 0.943579
| 0.939268
| 0.935182
| 0.905456
| 0.905074
| 0
| 0.004244
| 0.32367
| 77,338
| 1,622
| 684
| 47.680641
| 0.847283
| 0.796193
| 0
| 0
| 0
| 0
| 0.055624
| 0.008998
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0.090909
| 0
| 1.060606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
c1e4cfcc227dfa2ba6fc0733e8dea0f6d12e51df
| 87
|
py
|
Python
|
vttes/tests/common.py
|
forgedconcordance/vttestools
|
55f2b307010ec94e1fa0b5956cfac3381e28d732
|
[
"MIT"
] | null | null | null |
vttes/tests/common.py
|
forgedconcordance/vttestools
|
55f2b307010ec94e1fa0b5956cfac3381e28d732
|
[
"MIT"
] | null | null | null |
vttes/tests/common.py
|
forgedconcordance/vttestools
|
55f2b307010ec94e1fa0b5956cfac3381e28d732
|
[
"MIT"
] | null | null | null |
import synapse.tests.utils as s_t_utils
class VttTstBase(s_t_utils.SynTest):
    """Base class for vttes test cases.

    Adds no behavior of its own; it exists so the project's tests share a
    single base derived from ``synapse.tests.utils.SynTest``, giving one
    place to attach common fixtures later.
    """
    pass
| 17.4
| 39
| 0.793103
| 15
| 87
| 4.333333
| 0.733333
| 0.061538
| 0.215385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 87
| 4
| 40
| 21.75
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
a9eb31d246ce6c1879597d9cf2a5e2ddb1e85af2
| 167
|
py
|
Python
|
pykeg/backend/__init__.py
|
theshiv303/kegbot-server
|
425b0f8779e0d97aa6ca032b29b2623d693f9fd4
|
[
"MIT"
] | 75
|
2015-01-12T22:51:20.000Z
|
2022-02-23T02:09:50.000Z
|
pykeg/backend/__init__.py
|
theshiv303/kegbot-server
|
425b0f8779e0d97aa6ca032b29b2623d693f9fd4
|
[
"MIT"
] | 83
|
2015-01-03T19:04:46.000Z
|
2021-07-11T19:06:39.000Z
|
pykeg/backend/__init__.py
|
theshiv303/kegbot-server
|
425b0f8779e0d97aa6ca032b29b2623d693f9fd4
|
[
"MIT"
] | 66
|
2015-01-05T01:55:06.000Z
|
2021-11-27T17:07:24.000Z
|
from django.conf import settings
from django.utils.module_loading import import_string


def get_kegbot_backend():
    """Instantiate and return the backend class named by ``settings.KEGBOT_BACKEND``."""
    backend_cls = import_string(settings.KEGBOT_BACKEND)
    return backend_cls()
| 23.857143
| 53
| 0.826347
| 23
| 167
| 5.73913
| 0.608696
| 0.151515
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107784
| 167
| 6
| 54
| 27.833333
| 0.885906
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.75
| 0.25
| 1.25
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
e77fc5e64b6ddfdf69fd623c59ab8f10b29568f7
| 51,084
|
py
|
Python
|
openshift/client/apis/project_openshift_io_v1_api.py
|
flaper87/openshift-restclient-python
|
13d5d86ca89035b9f596032e7a34f3cc33bf8f18
|
[
"Apache-2.0"
] | null | null | null |
openshift/client/apis/project_openshift_io_v1_api.py
|
flaper87/openshift-restclient-python
|
13d5d86ca89035b9f596032e7a34f3cc33bf8f18
|
[
"Apache-2.0"
] | null | null | null |
openshift/client/apis/project_openshift_io_v1_api.py
|
flaper87/openshift-restclient-python
|
13d5d86ca89035b9f596032e7a34f3cc33bf8f18
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
OpenShift API (with Kubernetes)
OpenShift provides builds, application lifecycle, image content management, and administrative policy on top of Kubernetes. The API allows consistent management of those objects. All API operations are authenticated via an Authorization bearer token that is provided for service accounts as a generated secret (in JWT form) or via the native OAuth endpoint located at /oauth/authorize. Core infrastructure components may use client certificates that require no authentication. All API operations return a 'resourceVersion' string that represents the version of the object in the underlying storage. The standard LIST operation performs a snapshot read of the underlying objects, returning a resourceVersion representing a consistent version of the listed objects. The WATCH operation allows all updates to a set of objects after the provided resourceVersion to be observed by a client. By listing and beginning a watch from the returned resourceVersion, clients may observe a consistent view of the state of one or more objects. Note that WATCH always returns the update after the provided resourceVersion. Watch may be extended a limited time in the past - using etcd 2 the watch window is 1000 events (which on a large cluster may only be a few tens of seconds) so clients must explicitly handle the \"watch to old error\" by re-listing. Objects are divided into two rough categories - those that have a lifecycle and must reflect the state of the cluster, and those that have no state. Objects with lifecycle typically have three main sections: * 'metadata' common to all objects * a 'spec' that represents the desired state * a 'status' that represents how much of the desired state is reflected on the cluster at the current time Objects that have no state have 'metadata' but may lack a 'spec' or 'status' section. Objects are divided into those that are namespace scoped (only exist inside of a namespace) and those that are cluster scoped (exist outside of a namespace). 
A namespace scoped resource will be deleted when the namespace is deleted and cannot be created if the namespace has not yet been created or is in the process of deletion. Cluster scoped resources are typically only accessible to admins - resources like nodes, persistent volumes, and cluster policy. All objects have a schema that is a combination of the 'kind' and 'apiVersion' fields. This schema is additive only for any given version - no backwards incompatible changes are allowed without incrementing the apiVersion. The server will return and accept a number of standard responses that share a common schema - for instance, the common error type is 'unversioned.Status' (described below) and will be returned on any error from the API server. The API is available in multiple serialization formats - the default is JSON (Accept: application/json and Content-Type: application/json) but clients may also use YAML (application/yaml) or the native Protobuf schema (application/vnd.kubernetes.protobuf). Note that the format of the WATCH API call is slightly different - for JSON it returns newline delimited objects while for Protobuf it returns length-delimited frames (4 bytes in network-order) that contain a 'versioned.Watch' Protobuf object. See the OpenShift documentation at https://docs.openshift.org for more information.
OpenAPI spec version: v3.6.0-alpha.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from kubernetes.client.configuration import Configuration
from ..api_client import ApiClient
class ProjectOpenshiftIoV1Api(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """
    Bind this API wrapper to an ApiClient.

    :param api_client: explicit client to use; when omitted, the shared
        Configuration's client is used (and created on first demand).
    """
    # Configuration() is constructed unconditionally to match the
    # generator's original side effects.
    config = Configuration()
    if api_client:
        self.api_client = api_client
        return
    if not config.api_client:
        config.api_client = ApiClient()
    self.api_client = config.api_client
def create_project_openshift_io_v1_project(self, body, **kwargs):
    """
    create a Project

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :param V1Project body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1Project, or the request thread when called asynchronously
    """
    kwargs['_return_http_data_only'] = True
    # The _with_http_info variant honours `callback` itself, so the sync
    # and async paths reduce to the same delegated call.
    return self.create_project_openshift_io_v1_project_with_http_info(body, **kwargs)
def create_project_openshift_io_v1_project_with_http_info(self, body, **kwargs):
    """
    create a Project

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :param V1Project body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1Project, or the request thread when called asynchronously
    """
    all_params = [
        'body', 'pretty',
        'callback', '_return_http_data_only', '_preload_content', '_request_timeout',
    ]
    # Snapshot locals *before* any further local is introduced.
    params = locals()
    for name, value in iteritems(params['kwargs']):
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_project_openshift_io_v1_project" % name
            )
        params[name] = value
    del params['kwargs']
    # 'body' is required.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `create_project_openshift_io_v1_project`")
    collection_formats = {}
    resource_path = '/apis/project.openshift.io/v1/projects'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        resource_path, 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1Project',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_project_openshift_io_v1_project_request(self, body, **kwargs):
    """
    create a ProjectRequest

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :param V1ProjectRequest body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1ProjectRequest, or the request thread when called asynchronously
    """
    kwargs['_return_http_data_only'] = True
    # The _with_http_info variant honours `callback` itself, so the sync
    # and async paths reduce to the same delegated call.
    return self.create_project_openshift_io_v1_project_request_with_http_info(body, **kwargs)
def create_project_openshift_io_v1_project_request_with_http_info(self, body, **kwargs):
    """
    create a ProjectRequest

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :param V1ProjectRequest body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1ProjectRequest, or the request thread when called asynchronously
    """
    all_params = [
        'body', 'pretty',
        'callback', '_return_http_data_only', '_preload_content', '_request_timeout',
    ]
    # Snapshot locals *before* any further local is introduced.
    params = locals()
    for name, value in iteritems(params['kwargs']):
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_project_openshift_io_v1_project_request" % name
            )
        params[name] = value
    del params['kwargs']
    # 'body' is required.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `create_project_openshift_io_v1_project_request`")
    collection_formats = {}
    resource_path = '/apis/project.openshift.io/v1/projectrequests'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        resource_path, 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1ProjectRequest',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_project_openshift_io_v1_project(self, name, **kwargs):
    """
    delete a Project

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :param str name: name of the Project (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: UnversionedStatus, or the request thread when called asynchronously
    """
    kwargs['_return_http_data_only'] = True
    # The _with_http_info variant honours `callback` itself, so the sync
    # and async paths reduce to the same delegated call.
    return self.delete_project_openshift_io_v1_project_with_http_info(name, **kwargs)
def delete_project_openshift_io_v1_project_with_http_info(self, name, **kwargs):
    """
    delete a Project

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :param str name: name of the Project (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: UnversionedStatus, or the request thread when called asynchronously
    """
    all_params = [
        'name', 'pretty',
        'callback', '_return_http_data_only', '_preload_content', '_request_timeout',
    ]
    # Snapshot locals *before* any further local is introduced.
    params = locals()
    for key_, value in iteritems(params['kwargs']):
        if key_ not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_project_openshift_io_v1_project" % key_
            )
        params[key_] = value
    del params['kwargs']
    # 'name' is required.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `delete_project_openshift_io_v1_project`")
    collection_formats = {}
    resource_path = '/apis/project.openshift.io/v1/projects/{name}'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    form_params = []
    local_var_files = {}
    body_params = None  # DELETE carries no request body here
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        resource_path, 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UnversionedStatus',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_project_openshift_io_v1_api_resources(self, **kwargs):
    """
    get available resources

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :return: UnversionedAPIResourceList, or the request thread when called
        asynchronously
    """
    kwargs['_return_http_data_only'] = True
    # The _with_http_info variant honours `callback` itself, so the sync
    # and async paths reduce to the same delegated call.
    return self.get_project_openshift_io_v1_api_resources_with_http_info(**kwargs)
def get_project_openshift_io_v1_api_resources_with_http_info(self, **kwargs):
    """
    get available resources

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :return: UnversionedAPIResourceList, or the request thread when called
        asynchronously
    """
    # No API-level parameters; only the client plumbing kwargs are accepted.
    all_params = [
        'callback', '_return_http_data_only', '_preload_content', '_request_timeout',
    ]
    # Snapshot locals *before* any further local is introduced.
    params = locals()
    for name, value in iteritems(params['kwargs']):
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_project_openshift_io_v1_api_resources" % name
            )
        params[name] = value
    del params['kwargs']
    collection_formats = {}
    resource_path = '/apis/project.openshift.io/v1/'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
    }
    form_params = []
    local_var_files = {}
    body_params = None
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UnversionedAPIResourceList',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_project_openshift_io_v1_project(self, **kwargs):
    """
    list or watch objects of kind Project

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: restrict the listing by object fields.
    :param str label_selector: restrict the listing by object labels.
    :param str resource_version: with watch, show changes after this version.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: stream add/update/remove notifications instead of listing.
    :return: V1ProjectList, or the request thread when called asynchronously
    """
    kwargs['_return_http_data_only'] = True
    # The _with_http_info variant honours `callback` itself, so the sync
    # and async paths reduce to the same delegated call.
    return self.list_project_openshift_io_v1_project_with_http_info(**kwargs)
def list_project_openshift_io_v1_project_with_http_info(self, **kwargs):
    """
    list or watch objects of kind Project

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: restrict the listing by object fields.
    :param str label_selector: restrict the listing by object labels.
    :param str resource_version: with watch, show changes after this version.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: stream add/update/remove notifications instead of listing.
    :return: V1ProjectList, or the request thread when called asynchronously
    """
    all_params = [
        'pretty', 'field_selector', 'label_selector', 'resource_version',
        'timeout_seconds', 'watch',
        'callback', '_return_http_data_only', '_preload_content', '_request_timeout',
    ]
    # Snapshot locals *before* any further local is introduced.
    params = locals()
    for name, value in iteritems(params['kwargs']):
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_project_openshift_io_v1_project" % name
            )
        params[name] = value
    del params['kwargs']
    collection_formats = {}
    resource_path = '/apis/project.openshift.io/v1/projects'.replace('{format}', 'json')
    path_params = {}
    # Map snake_case call parameters onto their camelCase query keys,
    # preserving the original insertion order.
    query_params = {}
    for local_name, query_name in (
            ('pretty', 'pretty'),
            ('field_selector', 'fieldSelector'),
            ('label_selector', 'labelSelector'),
            ('resource_version', 'resourceVersion'),
            ('timeout_seconds', 'timeoutSeconds'),
            ('watch', 'watch')):
        if local_name in params:
            query_params[query_name] = params[local_name]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf',
             'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    form_params = []
    local_var_files = {}
    body_params = None
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1ProjectList',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_project_openshift_io_v1_project_request(self, **kwargs):
    """
    list objects of kind ProjectRequest

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: restrict the listing by object fields.
    :param str label_selector: restrict the listing by object labels.
    :param str resource_version: with watch, show changes after this version.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: stream add/update/remove notifications instead of listing.
    :return: UnversionedStatus, or the request thread when called asynchronously
    """
    kwargs['_return_http_data_only'] = True
    # The _with_http_info variant honours `callback` itself, so the sync
    # and async paths reduce to the same delegated call.
    return self.list_project_openshift_io_v1_project_request_with_http_info(**kwargs)
def list_project_openshift_io_v1_project_request_with_http_info(self, **kwargs):
    """
    list objects of kind ProjectRequest

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: restrict the listing by object fields.
    :param str label_selector: restrict the listing by object labels.
    :param str resource_version: with watch, show changes after this version.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: stream add/update/remove notifications instead of listing.
    :return: UnversionedStatus, or the request thread when called asynchronously
    """
    all_params = [
        'pretty', 'field_selector', 'label_selector', 'resource_version',
        'timeout_seconds', 'watch',
        'callback', '_return_http_data_only', '_preload_content', '_request_timeout',
    ]
    # Snapshot locals *before* any further local is introduced.
    params = locals()
    for name, value in iteritems(params['kwargs']):
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_project_openshift_io_v1_project_request" % name
            )
        params[name] = value
    del params['kwargs']
    collection_formats = {}
    resource_path = '/apis/project.openshift.io/v1/projectrequests'.replace('{format}', 'json')
    path_params = {}
    # Map snake_case call parameters onto their camelCase query keys,
    # preserving the original insertion order.
    query_params = {}
    for local_name, query_name in (
            ('pretty', 'pretty'),
            ('field_selector', 'fieldSelector'),
            ('label_selector', 'labelSelector'),
            ('resource_version', 'resourceVersion'),
            ('timeout_seconds', 'timeoutSeconds'),
            ('watch', 'watch')):
        if local_name in params:
            query_params[query_name] = params[local_name]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf',
             'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    form_params = []
    local_var_files = {}
    body_params = None
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UnversionedStatus',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def patch_project_openshift_io_v1_project(self, name, body, **kwargs):
    """
    partially update the specified Project

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :param str name: name of the Project (required)
    :param UnversionedPatch body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1Project, or the request thread when called asynchronously
    """
    kwargs['_return_http_data_only'] = True
    # The _with_http_info variant honours `callback` itself, so the sync
    # and async paths reduce to the same delegated call.
    return self.patch_project_openshift_io_v1_project_with_http_info(name, body, **kwargs)
def patch_project_openshift_io_v1_project_with_http_info(self, name, body, **kwargs):
    """
    partially update the specified Project

    Synchronous by default; pass a `callback` function (invoked with the
    response) to run the request asynchronously.

    :param callback function: callback for asynchronous requests (optional)
    :param str name: name of the Project (required)
    :param UnversionedPatch body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1Project, or the request thread when called asynchronously
    """
    all_params = [
        'name', 'body', 'pretty',
        'callback', '_return_http_data_only', '_preload_content', '_request_timeout',
    ]
    # Snapshot locals *before* any further local is introduced.
    params = locals()
    for key_, value in iteritems(params['kwargs']):
        if key_ not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_project_openshift_io_v1_project" % key_
            )
        params[key_] = value
    del params['kwargs']
    # Both 'name' and 'body' are required.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `patch_project_openshift_io_v1_project`")
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `patch_project_openshift_io_v1_project`")
    collection_formats = {}
    resource_path = '/apis/project.openshift.io/v1/projects/{name}'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/merge-patch+json',
             'application/strategic-merge-patch+json']),
    }
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        resource_path, 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1Project',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def read_project_openshift_io_v1_project(self, name, **kwargs):
    """
    read the specified Project
    Convenience wrapper around
    `read_project_openshift_io_v1_project_with_http_info` that returns only
    the deserialized response data rather than the full
    (data, status, headers) tuple.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.read_project_openshift_io_v1_project(name, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str name: name of the Project (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1Project
    If the method is called asynchronously,
    returns the request thread.
    """
    # Ask the lower-level call for the response body only.
    kwargs['_return_http_data_only'] = True
    # With a callback the underlying call returns the request thread
    # immediately; without one it blocks and returns the deserialized
    # V1Project. Either way its return value is exactly what the caller
    # wants, so a single call covers both the sync and async paths.
    return self.read_project_openshift_io_v1_project_with_http_info(name, **kwargs)
def read_project_openshift_io_v1_project_with_http_info(self, name, **kwargs):
    """
    read the specified Project
    Builds and dispatches the GET request for a single Project resource,
    returning the full response information.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.read_project_openshift_io_v1_project_with_http_info(name, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str name: name of the Project (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1Project
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword arguments understood by this endpoint; anything else is a
    # caller error and is rejected with a TypeError below.
    all_params = ['name', 'pretty', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name}
    for key, val in iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method read_project_openshift_io_v1_project" % key
            )
        params[key] = val
    # verify the required parameter 'name' is set
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `read_project_openshift_io_v1_project`")

    collection_formats = {}
    resource_path = '/apis/project.openshift.io/v1/projects/{name}'.replace('{format}', 'json')
    path_params = {'name': params['name']}
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    form_params = []
    local_var_files = {}
    # GET carries no request body.
    body_params = None
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    # Authentication setting
    auth_settings = ['BearerToken']
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1Project',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def replace_project_openshift_io_v1_project(self, name, body, **kwargs):
    """
    replace the specified Project
    Convenience wrapper around
    `replace_project_openshift_io_v1_project_with_http_info` that returns
    only the deserialized response data rather than the full
    (data, status, headers) tuple.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.replace_project_openshift_io_v1_project(name, body, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str name: name of the Project (required)
    :param V1Project body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1Project
    If the method is called asynchronously,
    returns the request thread.
    """
    # Ask the lower-level call for the response body only.
    kwargs['_return_http_data_only'] = True
    # With a callback the underlying call returns the request thread
    # immediately; without one it blocks and returns the deserialized
    # V1Project. Its return value serves both paths, so call it once.
    return self.replace_project_openshift_io_v1_project_with_http_info(name, body, **kwargs)
def replace_project_openshift_io_v1_project_with_http_info(self, name, body, **kwargs):
    """
    replace the specified Project
    Builds and dispatches the PUT request that replaces a Project resource,
    returning the full response information.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.replace_project_openshift_io_v1_project_with_http_info(name, body, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str name: name of the Project (required)
    :param V1Project body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1Project
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword arguments understood by this endpoint; anything else is a
    # caller error and is rejected with a TypeError below.
    all_params = ['name', 'body', 'pretty', 'callback',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']
    params = {'name': name, 'body': body}
    for key, val in iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_project_openshift_io_v1_project" % key
            )
        params[key] = val
    # verify the required parameter 'name' is set
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `replace_project_openshift_io_v1_project`")
    # verify the required parameter 'body' is set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `replace_project_openshift_io_v1_project`")

    collection_formats = {}
    resource_path = '/apis/project.openshift.io/v1/projects/{name}'.replace('{format}', 'json')
    path_params = {'name': params['name']}
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    form_params = []
    local_var_files = {}
    # The replacement Project object is sent as the PUT request body.
    body_params = params.get('body')
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    # Authentication setting
    auth_settings = ['BearerToken']
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1Project',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
| 48.605138
| 3,330
| 0.606726
| 5,507
| 51,084
| 5.405484
| 0.069366
| 0.048374
| 0.048374
| 0.053749
| 0.888236
| 0.886623
| 0.885548
| 0.877755
| 0.8707
| 0.867341
| 0
| 0.003376
| 0.315715
| 51,084
| 1,050
| 3,331
| 48.651429
| 0.848209
| 0.375069
| 0
| 0.834286
| 0
| 0
| 0.196297
| 0.077672
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03619
| false
| 0
| 0.013333
| 0
| 0.102857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
99b784ea762fa79ecdc3e367501447aed934f052
| 74
|
py
|
Python
|
multilingual_t5/r_indic_corp_hi/__init__.py
|
sumanthd17/mt5
|
c99b4e3ad1c69908c852c730a1323ccb52d48f58
|
[
"Apache-2.0"
] | null | null | null |
multilingual_t5/r_indic_corp_hi/__init__.py
|
sumanthd17/mt5
|
c99b4e3ad1c69908c852c730a1323ccb52d48f58
|
[
"Apache-2.0"
] | null | null | null |
multilingual_t5/r_indic_corp_hi/__init__.py
|
sumanthd17/mt5
|
c99b4e3ad1c69908c852c730a1323ccb52d48f58
|
[
"Apache-2.0"
] | null | null | null |
"""r_indic_corp_hi dataset."""
from .r_indic_corp_hi import RIndicCorpHi
| 18.5
| 41
| 0.797297
| 12
| 74
| 4.416667
| 0.666667
| 0.226415
| 0.377358
| 0.45283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094595
| 74
| 3
| 42
| 24.666667
| 0.791045
| 0.324324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
82196da273f06f3d3a7635d9543ec944bb1c88dc
| 128
|
py
|
Python
|
moonsense/models/__init__.py
|
moonsense/python-sdk
|
0ac6e03ded5c4c6fb001dd93bd29201555bce56c
|
[
"Apache-2.0"
] | 6
|
2021-06-02T15:29:05.000Z
|
2022-03-21T20:13:40.000Z
|
moonsense/models/__init__.py
|
moonsense/python-sdk
|
0ac6e03ded5c4c6fb001dd93bd29201555bce56c
|
[
"Apache-2.0"
] | 1
|
2021-12-16T09:45:49.000Z
|
2021-12-16T09:45:49.000Z
|
moonsense/models/__init__.py
|
moonsense/python-sdk
|
0ac6e03ded5c4c6fb001dd93bd29201555bce56c
|
[
"Apache-2.0"
] | null | null | null |
from .common_v2_pb2 import *
from .bundle_v2_pb2 import *
from .control_plane_v2_pb2 import *
from .data_plane_v2_pb2 import *
| 21.333333
| 35
| 0.804688
| 22
| 128
| 4.227273
| 0.409091
| 0.215054
| 0.473118
| 0.483871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072072
| 0.132813
| 128
| 5
| 36
| 25.6
| 0.765766
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
413582b3aa1150aa2583c89e3333be5d6a6eb0d1
| 41
|
py
|
Python
|
ir_export_extended_ept/py/__init__.py
|
lester-lees/extra_addons_sz
|
cddaf972cf4ea64c553bcff0006eb006a115d5ee
|
[
"Apache-2.0"
] | null | null | null |
ir_export_extended_ept/py/__init__.py
|
lester-lees/extra_addons_sz
|
cddaf972cf4ea64c553bcff0006eb006a115d5ee
|
[
"Apache-2.0"
] | null | null | null |
ir_export_extended_ept/py/__init__.py
|
lester-lees/extra_addons_sz
|
cddaf972cf4ea64c553bcff0006eb006a115d5ee
|
[
"Apache-2.0"
] | null | null | null |
import ir_exports
import ir_exports_line
| 13.666667
| 22
| 0.902439
| 7
| 41
| 4.857143
| 0.571429
| 0.470588
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 41
| 2
| 23
| 20.5
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
4197dc883e284a4a71e71e025308ce1eec697df6
| 7,060
|
py
|
Python
|
components/core/qcg/pilotjob/tests/test_iterschedulers.py
|
LourensVeen/QCG-PilotJob
|
e78c35a9b16b1042a2d5b54352a2ca2e3a58c6b9
|
[
"Apache-2.0"
] | null | null | null |
components/core/qcg/pilotjob/tests/test_iterschedulers.py
|
LourensVeen/QCG-PilotJob
|
e78c35a9b16b1042a2d5b54352a2ca2e3a58c6b9
|
[
"Apache-2.0"
] | null | null | null |
components/core/qcg/pilotjob/tests/test_iterschedulers.py
|
LourensVeen/QCG-PilotJob
|
e78c35a9b16b1042a2d5b54352a2ca2e3a58c6b9
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from qcg.pilotjob.iterscheduler import IterScheduler, MaximumIters, SplitInto, DefaultScheduler
def test_iterscheduler_parsing():
    """Scheduler name lookup is case-insensitive, and unknown names fall
    back to the default scheduler."""
    cases = [
        ('maximum-iters', MaximumIters),
        ('MAXIMUM-ITERS', MaximumIters),
        ('Maximum-Iters', MaximumIters),
        ('split-into', SplitInto),
        ('SPLIT-INTO', SplitInto),
        ('SpliT-Into', SplitInto),
        ('unknown', DefaultScheduler),
    ]
    for label, expected_cls in cases:
        assert IterScheduler.get_scheduler(label) == expected_cls
def test_iterscheduler_splitinto():
    """Split-into scheduler: each of `iters` iterations gets an 'exact'
    allocation of resources/parts, and the generator yields exactly
    `iters` allocations before raising StopIteration.
    """
    iters = 10

    def check_split(resources, parts=None):
        # Build the scheduler generator; when `parts` is omitted the
        # scheduler defaults to one part per iteration.
        if parts is None:
            gen = IterScheduler.get_scheduler('split-into')(
                {'min': 1}, iters, resources).generate()
            expected = resources / iters
        else:
            gen = IterScheduler.get_scheduler('split-into')(
                {'min': 1}, iters, resources, parts=parts).generate()
            expected = resources / parts
        for _ in range(iters):
            job_iter_res = next(gen)
            # BUG FIX: the first stanza of the original asserted
            # all(('exact', ...)) — the literal string 'exact' is always
            # truthy, so the membership check was a no-op. Use a real
            # membership test, as the other stanzas did.
            assert job_iter_res and 'exact' in job_iter_res \
                and job_iter_res['exact'] == expected, str(job_iter_res)
        # The scheduler must be exhausted after `iters` allocations.
        with pytest.raises(StopIteration):
            next(gen)

    check_split(10, 10)
    check_split(10, 5)
    check_split(10, 2)
    # default 'parts' as number of iterations
    check_split(10)
def test_iterscheduler_maximum_iters():
    """Maximum-iters scheduler: verify the per-iteration 'exact' resource
    allocations for a range of iteration counts, resource pools and 'min'
    settings, including multi-round schedules.
    """

    def check_rounds(config, iters, resources, expected):
        # Run the scheduler and compare each iteration's 'exact' allocation
        # against the expected sequence, then confirm exhaustion.
        gen = IterScheduler.get_scheduler('maximum-iters')(
            config, iters, resources).generate()
        for i in range(iters):
            job_iter_res = next(gen)
            assert job_iter_res and 'exact' in job_iter_res \
                and job_iter_res['exact'] == expected[i], \
                "{} - {}".format(i, str(job_iter_res))
        with pytest.raises(StopIteration):
            next(gen)

    # all iterations in a single round, one resource each
    check_rounds({'min': 1}, 10, 10, [1] * 10)
    # two rounds, one resource each
    check_rounds({'min': 1}, 20, 10, [1] * 20)
    # single round, two resources per iteration
    check_rounds({'min': 1}, 5, 10, [2] * 5)
    # single round, allocations 3, 3, 4 (original comment said "4, 3, 3"
    # but the scheduler emits the smaller allocations first)
    check_rounds({'min': 1}, 3, 10, [3, 3, 4])
    # same split with an empty config (no explicit 'min')
    check_rounds({}, 3, 10, [3, 3, 4])
    # single round, allocations 3, 4, 4
    check_rounds({}, 3, 11, [3, 4, 4])
    check_rounds({'min': 3}, 3, 11, [3, 4, 4])
    # two rounds (two jobs in first, single in second)
    check_rounds({'min': 5}, 3, 11, [5, 6, 11])
    # two rounds (2 jobs in each)
    check_rounds({'min': 5}, 4, 11, [5, 6, 5, 6])
    # four rounds (1 job per round, full pool each time)
    check_rounds({'min': 6}, 4, 11, [11, 11, 11, 11])
| 39.887006
| 119
| 0.652975
| 993
| 7,060
| 4.382679
| 0.066465
| 0.110983
| 0.158548
| 0.064338
| 0.901654
| 0.886949
| 0.886949
| 0.886949
| 0.886949
| 0.886949
| 0
| 0.021578
| 0.218839
| 7,060
| 176
| 120
| 40.113636
| 0.767543
| 0.05
| 0
| 0.82069
| 0
| 0
| 0.066179
| 0
| 0
| 0
| 0
| 0
| 0.144828
| 1
| 0.02069
| false
| 0
| 0.013793
| 0
| 0.034483
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
419f5d28e8e6fa68d994dfffcd1cde45efc1f5ea
| 136
|
py
|
Python
|
api/views.py
|
annevandalfsen/screenbird
|
38b70302be3b3dc0c74b6aae8e09666115592aef
|
[
"MIT",
"Unlicense"
] | 121
|
2015-01-01T23:31:36.000Z
|
2021-05-27T04:24:44.000Z
|
api/views.py
|
annevandalfsen/screenbird
|
38b70302be3b3dc0c74b6aae8e09666115592aef
|
[
"MIT",
"Unlicense"
] | 1
|
2017-02-08T04:34:14.000Z
|
2017-02-08T04:34:14.000Z
|
api/views.py
|
annevandalfsen/screenbird
|
38b70302be3b3dc0c74b6aae8e09666115592aef
|
[
"MIT",
"Unlicense"
] | 31
|
2015-01-13T00:23:33.000Z
|
2017-05-13T21:50:29.000Z
|
def config_record_on_account(request,account_id):
    """Placeholder view: configure recording for the given account.

    Not implemented — accepts the request and ``account_id`` and does
    nothing. Presumably a Django/web-framework view stub (it takes a
    ``request`` first argument) — TODO confirm against the URL config.
    """
    pass
def config_record_on_channel(request, channel_id):
    """Placeholder view: configure recording for the given channel.

    Not implemented — accepts the request and ``channel_id`` and does
    nothing. Presumably a Django/web-framework view stub (it takes a
    ``request`` first argument) — TODO confirm against the URL config.
    """
    pass
| 17
| 50
| 0.705882
| 18
| 136
| 4.888889
| 0.5
| 0.204545
| 0.340909
| 0.386364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 136
| 7
| 51
| 19.428571
| 0.846154
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
41bbf2787754bba6b1c72e243963d17373bc02e6
| 470
|
py
|
Python
|
exercises/exe11 - 20/exe014.py
|
thomas-rohde/Classes-Python
|
f862995510b7aabf68bc14aecf815f597034d8a1
|
[
"MIT"
] | null | null | null |
exercises/exe11 - 20/exe014.py
|
thomas-rohde/Classes-Python
|
f862995510b7aabf68bc14aecf815f597034d8a1
|
[
"MIT"
] | null | null | null |
exercises/exe11 - 20/exe014.py
|
thomas-rohde/Classes-Python
|
f862995510b7aabf68bc14aecf815f597034d8a1
|
[
"MIT"
] | null | null | null |
# Exercise 014: read a number and report its integer part.
# The two triple-quoted blocks below are earlier attempts kept as inert
# string-literal expressions (they also printed the decimal part using
# math.trunc); only the final two lines actually execute.
'''import math
n = float(input('Digite um nº: '))
print('O valor digitado foi {}, tendo sua parte inteira {}, e decimal {:.3f}'.format(n,math.trunc(n), n - math.trunc(n)))'''
'''from math import trunc
n = float(input('Digite um nº: '))
print('O valor digitado foi {}, tendo sua parte inteira {}, e decimal {:.3f}'.format(n, trunc(n), n - trunc(n)))'''
# Active solution: prompt for a float and print it with its int() truncation.
n = float(input('Digite um nº: '))
print('O valor digitado foi {}, tendo sua parte inteira {}'.format(n, int(n)))
| 36.153846
| 124
| 0.631915
| 77
| 470
| 3.857143
| 0.311688
| 0.10101
| 0.111111
| 0.171717
| 0.750842
| 0.750842
| 0.750842
| 0.750842
| 0.750842
| 0.750842
| 0
| 0.005025
| 0.153191
| 470
| 13
| 125
| 36.153846
| 0.741206
| 0.359574
| 0
| 0
| 0
| 0
| 0.546218
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
ec0cf3fd597003746f562587b03aba68d0d218bd
| 82
|
py
|
Python
|
pbpl/common/meep_units.py
|
ucla-pbpl/pbpl-common
|
959aea73b6969e2c06654bc920cc5a57787f81a8
|
[
"MIT"
] | null | null | null |
pbpl/common/meep_units.py
|
ucla-pbpl/pbpl-common
|
959aea73b6969e2c06654bc920cc5a57787f81a8
|
[
"MIT"
] | null | null | null |
pbpl/common/meep_units.py
|
ucla-pbpl/pbpl-common
|
959aea73b6969e2c06654bc920cc5a57787f81a8
|
[
"MIT"
] | null | null | null |
# Initialize this module with the 'MEEP' unit system: ask the sibling
# .units module to define its constants, then re-export everything from
# .units at this module's top level.
from .units import define_constants
# NOTE(review): presumably define_constants('MEEP') must run before the
# wildcard import below so the freshly defined names are picked up —
# confirm against the .units implementation.
define_constants('MEEP')
from .units import *
| 20.5
| 35
| 0.804878
| 11
| 82
| 5.818182
| 0.545455
| 0.28125
| 0.46875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109756
| 82
| 3
| 36
| 27.333333
| 0.876712
| 0
| 0
| 0
| 0
| 0
| 0.04878
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ec15cb348ada2875583084f4181ad1e5e4399cc0
| 101
|
py
|
Python
|
craigsbot/parser/__init__.py
|
mohsinhaider/craigsbot
|
e7a6237024c1a06b17fed326b93069085bcb3e3d
|
[
"MIT"
] | null | null | null |
craigsbot/parser/__init__.py
|
mohsinhaider/craigsbot
|
e7a6237024c1a06b17fed326b93069085bcb3e3d
|
[
"MIT"
] | null | null | null |
craigsbot/parser/__init__.py
|
mohsinhaider/craigsbot
|
e7a6237024c1a06b17fed326b93069085bcb3e3d
|
[
"MIT"
] | null | null | null |
from craigsbot.parser.craigslist_parser import CraigslistSearchResultsParser, CraigslistPostingParser
| 101
| 101
| 0.930693
| 8
| 101
| 11.625
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039604
| 101
| 1
| 101
| 101
| 0.958763
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ec1d4c88551a2cf8a9fc16177272d3e04d273afa
| 21,758
|
py
|
Python
|
data_analysis/gaze/pipelines.py
|
vedb/data_analysis
|
b46f58ba424680353d3abd0014a7d0a339bf6e6c
|
[
"MIT"
] | null | null | null |
data_analysis/gaze/pipelines.py
|
vedb/data_analysis
|
b46f58ba424680353d3abd0014a7d0a339bf6e6c
|
[
"MIT"
] | null | null | null |
data_analysis/gaze/pipelines.py
|
vedb/data_analysis
|
b46f58ba424680353d3abd0014a7d0a339bf6e6c
|
[
"MIT"
] | null | null | null |
# Calibration script
# import vm_preproc as vmp
# import vedb_store
import numpy as np
import tqdm
import os
from . import pupil_detection_pl, calibrate_pl
from . import marker_detection, gaze_utils
from pupil_recording_interface.externals.gaze_mappers import Binocular_Gaze_Mapper
#
# def pupil_2d_monocular_v01(
# session_folder,
# sname=None, # Base for each file?
# tag="pupil_2d_monocular_v01",
# output_path=None,
# batch_size_pupil="auto",
# batch_size_marker="auto",
# marker_rescale=0.5,
# progress_bar=tqdm.tqdm,
# properties=None,
# ):
# """
# Parameters
# ----------
# tag : A short label for the pipeline
# session_folder : string
# file path to session. Ultimately want to replace this with a session
# object from the database.
# sname : format string
# must contain {step}; if provided, tag and output_path are ignored
#
# Notes
# -----
# Ultimately, for saving files, we want the output of each step saved
# along with the function and parameters that were used to generate it.
#
# This is not the case now. Needs work.
# """
# # Deal with inputs
# if output_path is None:
# output_path = session_folder
# if sname is None:
# sname = os.path.join(output_path, "gaze_vedb", tag + "_{step}.npz")
# fdir, _ = os.path.split(sname)
# if not os.path.exists(fdir):
# print("creating", fdir)
# os.makedirs(fdir)
# # (0) Get session
# ses = vedb_store.Session(folder=session_folder)
# # (1) Pupil detection (L, R)
# fn_pupil = pupil_detection_pl.plabs_detect_pupil
#
# pupil_file_left = sname.format(step="pupilpos_left")
# if os.path.exists(pupil_file_left):
# print("Loading pupils left")
# pupil_arrays_left = {}
# dat = np.load(pupil_file_left, allow_pickle=True)
# for k in dat.keys():
# pupil_arrays_left[k] = dat[k]
# pupil_list_left = gaze_utils.arraydict_to_dictlist(pupil_arrays_left)
# else:
# # Get eye files (Left eye)
# eye_left_time_file, eye_left_video_file = ses.paths["eye_left"]
# inputs_pupil_left = dict(
# fpaths=dict(eye_video=eye_left_video_file, timestamps=eye_left_time_file,),
# variable_names=None,
# )
# print("\n\nRunning pupil detection for the left eye\n\n")
# # Run pupil detection
# pupil_list_left = vmp.utils.batch_run(
# fn_pupil,
# inputs_pupil_left,
# batch_size=batch_size_pupil,
# batch_combine_fn=vmp.utils.list_reduce,
# progress_bar=progress_bar,
# id=1, # left eye # FIX ME: OPTION HERE FOR R, L, or binocular
# properties=properties,
# )
# # Get arrays instead of list of dicts
# pupil_arrays_left = gaze_utils.dictlist_to_arraydict(pupil_list_left)
# # Save pupil detection
# pupil_file_left = sname.format(step="pupilpos_left")
# np.savez(pupil_file_left, **pupil_arrays_left)
#
# pupil_file_right = sname.format(step="pupilpos_right")
# if os.path.exists(pupil_file_right):
# print("Loading pupils right")
# pupil_arrays_right = {}
# dat = np.load(pupil_file_right, allow_pickle=True)
# for k in dat.keys():
# pupil_arrays_right[k] = dat[k]
# pupil_list_right = gaze_utils.arraydict_to_dictlist(pupil_arrays_right)
# else:
# # Get eye files (Right eye)
# eye_right_time_file, eye_right_video_file = ses.paths["eye_right"]
# inputs_pupil_right = dict(
# fpaths=dict(
# eye_video=eye_right_video_file, timestamps=eye_right_time_file,
# ),
# variable_names=None,
# )
# print("\n\nRunning pupil detection for the right eye\n\n")
# # Run pupil detection
# pupil_list_right = vmp.utils.batch_run(
# fn_pupil,
# inputs_pupil_right,
# batch_size=batch_size_pupil,
# batch_combine_fn=vmp.utils.list_reduce,
# progress_bar=progress_bar,
# id=0, # right eye # FIX ME: OPTION HERE FOR R, L, or binocular
# properties=properties,
# )
# # Get arrays instead of list of dicts
# pupil_arrays_right = gaze_utils.dictlist_to_arraydict(pupil_list_right)
# # Save pupil detection
#
# np.savez(pupil_file_right, **pupil_arrays_right)
#
# # (2) Marker detection
# ref_file = sname.format(step="markerpos")
# # Get world video files
# world_time_file, world_video_file = ses.paths["world_camera"]
# # Load 1 second of data 10 seconds in (to allow time for camera to start)
# world_time, world_video = ses.load("world_camera", idx=(10, 11))
# _, video_vdim, video_hdim = world_video.shape[:3]
#
# if os.path.exists(ref_file):
# print("Loading markers")
# ref_arrays = {}
# dat = np.load(ref_file, allow_pickle=True)
# for k in dat.keys():
# ref_arrays[k] = dat[k]
# ref_list = gaze_utils.arraydict_to_dictlist(ref_arrays)
# else:
# fn_marker = marker_detection.find_concentric_circles
# inputs_marker = dict(
# fpaths=dict(video_data=world_video_file, timestamps=world_time_file),
# variable_names=None,
# )
# print("\n\nRunning marker detection \n\n")
# # Run marker detection
# ref_list = vmp.utils.batch_run(
# fn_marker,
# inputs_marker,
# batch_size=batch_size_marker,
# batch_combine_fn=vmp.utils.list_reduce,
# scale=marker_rescale,
# progress_bar=progress_bar,
# )
# # Get arrays instead of dicts
# ref_arrays = gaze_utils.dictlist_to_arraydict(ref_list)
# # Save calibration markers
# np.savez(ref_file, **ref_arrays)
#
# # (3) Calibrate
# # Get data for pupil calibration
# print("\n\nGetting data for calibration \n\n")
# is_binocular, matched_data_left = calibrate_pl.get_data(pupil_list_left, ref_list)
# # Run calibration
# # NOTE: zero index for matched_data here is because this is simply monocular,
# # and matched data only returns a 1-long tuple. If we want binocular, this will
# # need changing.
# print("\n\nRunning 2d monocular calibration [left eye] \n\n")
# method, result_left = calibrate_pl.calibrate_2d_monocular(
# matched_data_left[0], frame_size=(video_vdim, video_hdim)
# )
# # Create mapper for gaze
# cx, cy, n = result_left["args"]["params"]
# mapper_left = calibrate_pl.calibrate_2d.make_map_function(cx, cy, n)
#
# # (4) Map gaze to video coordinates
# # Mapper takes two inputs: normalized pupil x and y position
# print("\n\nRunning gaze mapper [left eye] \n\n")
# pupil_x, pupil_y = pupil_arrays_left["norm_pos"].T
# gaze_left = mapper_left([pupil_x, pupil_y])
# # Transpose output so time is the first dimension
# gaze_left = np.vstack(gaze_left).T
#
# is_binocular, matched_data_right = calibrate_pl.get_data(pupil_list_right, ref_list)
# # Run calibration
# # NOTE: zero index for matched_data here is because this is simply monocular,
# # and matched data only returns a 1-long tuple. If we want binocular, this will
# # need changing.
# print("\n\nRunning 2d monocular calibration [right eye] \n\n")
# method, result_right = calibrate_pl.calibrate_2d_monocular(
# matched_data_right[0], frame_size=(video_vdim, video_hdim)
# )
# # Create mapper for gaze
# cx, cy, n = result_right["args"]["params"]
# mapper_right = calibrate_pl.calibrate_2d.make_map_function(cx, cy, n)
#
# # (4) Map gaze to video coordinates
# # Mapper takes two inputs: normalized pupil x and y position
# print("\n\nRunning gaze mapper [right eye] \n\n")
# pupil_x, pupil_y = pupil_arrays_right["norm_pos"].T
# gaze_right = mapper_right([pupil_x, pupil_y])
# # Transpose output so time is the first dimension
# gaze_right = np.vstack(gaze_right).T
#
# gaze_file = sname.format(step="gaze")
# np.savez(gaze_file, gaze_left=gaze_left, gaze_right=gaze_right)
# return gaze_left, gaze_right
#
# def pupil_2d_binocular_v01(
# session_folder,
# param_dict,
# string_name=None, # Base for each file?
# tag="pupil_2d_binocular_v02",
# output_path=None,
# batch_size_pupil="auto",
# batch_size_marker="auto",
# marker_rescale=1,
# progress_bar=tqdm.tqdm,
# properties=None,
# ):
# """
# Parameters
# ----------
# tag : A short label for the pipeline
# session_folder : string
# file path to session. Ultimately want to replace this with a session
# object from the database.
# string_name : format string
# must contain {step}; if provided, tag and output_path are ignored
#
# Notes
# -----
# Ultimately, for saving files, we want the output of each step saved
# along with the function and parameters that were used to generate it.
#
# This is not the case now. Needs work.
# """
# print(param_dict.keys())
# # Todo: Read length of the session id from parameters?
# session_id = session_folder[-19:] + '/'
# output_path = param_dict['directory']['gaze_directory'] + session_id
# # Deal with inputs
# if output_path is None:
# #output_path = session_folder
# raise ValueError("parameters' yaml file doesn't have valid gaze saving_directory!")
# else:
# print("saving results to: ", output_path)
#
# tag = param_dict['calibration']['pupil_detection'] + '_' +\
# param_dict['calibration']['eye'] + '_' +\
# param_dict['calibration']['algorithm']
# print('tag : ', tag)
# if string_name is None:
# string_name = os.path.join(output_path, tag + "_{step}.npz")
# print("file_name", string_name)
# #fdir, _ = os.path.split(string_name)
# if not os.path.exists(output_path):
# print("creating", output_path)
# os.makedirs(output_path)
# # (0) Get session
# session = vedb_store.Session(folder=session_folder)
# # (1) Pupil detection (L, R)
# fn_pupil = pupil_detection_pl.plabs_detect_pupil
#
# pupil_file_left = string_name.format(step="pupil_pos_left")
# if os.path.exists(pupil_file_left):
# print("Loading pupils left")
# pupil_arrays_left = {}
# data = np.load(pupil_file_left, allow_pickle=True)
# for k in data.keys():
# pupil_arrays_left[k] = data[k]
# pupil_list_left = gaze_utils.arraydict_to_dictlist(pupil_arrays_left)
# else:
# # Get eye files (Left eye)
# eye_left_time_file, eye_left_video_file = session.paths["eye_left"]
# inputs_pupil_left = dict(
# fpaths=dict(eye_video=eye_left_video_file, timestamps=eye_left_time_file,),
# variable_names=None,
# )
# print("\n\nRunning pupil detection for the left eye\n\n")
# # Run pupil detection
# pupil_list_left = vmp.utils.batch_run(
# fn_pupil,
# inputs_pupil_left,
# batch_size=batch_size_pupil,
# batch_combine_fn=vmp.utils.list_reduce,
# progress_bar=progress_bar,
# id=1, # left eye # FIX ME: OPTION HERE FOR R, L, or binocular
# properties=properties,
# )
# # Get arrays instead of list of dicts
# pupil_arrays_left = gaze_utils.dictlist_to_arraydict(pupil_list_left)
# # Save pupil detection
# pupil_file_left = string_name.format(step="pupil_pos_left")
# np.savez(pupil_file_left, **pupil_arrays_left)
#
# pupil_file_right = string_name.format(step="pupil_pos_right")
# if os.path.exists(pupil_file_right):
# print("Loading pupils right")
# pupil_arrays_right = {}
# data = np.load(pupil_file_right, allow_pickle=True)
# for k in data.keys():
# pupil_arrays_right[k] = data[k]
# pupil_list_right = gaze_utils.arraydict_to_dictlist(pupil_arrays_right)
# else:
# # Get eye files (Right eye)
# eye_right_time_file, eye_right_video_file = session.paths["eye_right"]
# inputs_pupil_right = dict(
# fpaths=dict(
# eye_video=eye_right_video_file, timestamps=eye_right_time_file,
# ),
# variable_names=None,
# )
# print("\n\nRunning pupil detection for the right eye\n\n")
# # Run pupil detection
# pupil_list_right = vmp.utils.batch_run(
# fn_pupil,
# inputs_pupil_right,
# batch_size=batch_size_pupil,
# batch_combine_fn=vmp.utils.list_reduce,
# progress_bar=progress_bar,
# id=0, # right eye # FIX ME: OPTION HERE FOR R, L, or binocular
# properties=properties,
# )
# # Get arrays instead of list of dicts
# pupil_arrays_right = gaze_utils.dictlist_to_arraydict(pupil_list_right)
# # Save pupil detection
#
# np.savez(pupil_file_right, **pupil_arrays_right)
#
# # (2) Calibration Marker detection
# cal_ref_file = string_name.format(step="calibration_ref_pos")
# # Get world video files
# # Todo: Make sure this is loaded only if necessary
# world_time_file, world_video_file = session.paths["world_camera"]
# # Load 1 second of data 10 seconds in (to allow time for camera to start)
# world_time, world_video = session.load("world_camera", idx=(10, 11))
# _, video_vdim, video_hdim = world_video.shape[:3]
#
# if os.path.exists(cal_ref_file):
# print("Loading calibration markers")
# ref_arrays = {}
# data = np.load(cal_ref_file, allow_pickle=True)
# for k in data.keys():
# ref_arrays[k] = data[k]
# ref_list = gaze_utils.arraydict_to_dictlist(ref_arrays)
# else:
# fn_marker = marker_detection.find_concentric_circles
# inputs_marker = dict(
# fpaths=dict(video_data=world_video_file, timestamps=world_time_file),
# variable_names=None,
# )
# print("\n\nRunning Calibration marker detection \n\n")
# # Run marker detection
# ref_list = vmp.utils.batch_run(
# fn_marker,
# inputs_marker,
# batch_size=batch_size_marker,
# batch_combine_fn=vmp.utils.list_reduce,
# scale=marker_rescale,
# progress_bar=progress_bar,
# )
# # Get arrays instead of dicts
# ref_arrays = gaze_utils.dictlist_to_arraydict(ref_list)
# # Save calibration markers
# np.savez(cal_ref_file, **ref_arrays)
#
# # (3) Validation Marker detection
# val_ref_file = string_name.format(step="validation_ref_pos_dict")
# # Todo: Make sure this is handled correctly
# # Get world video files
# # world_time_file, world_video_file = session.paths["world_camera"]
# # Load 1 second of data 10 seconds in (to allow time for camera to start)
# # world_time, world_video = session.load("world_camera", idx=(10, 11))
# # _, video_vdim, video_hdim = world_video.shape[:3]
#
# if os.path.exists(val_ref_file):
# print("Loading validation markers")
# ref_arrays = {}
# data = np.load(val_ref_file, allow_pickle=True)
# for k in data.keys():
# ref_arrays[k] = data[k]
# ref_list = gaze_utils.arraydict_to_dictlist(ref_arrays)
# else:
# fn_marker = marker_detection.find_checkerboard
# inputs_marker = dict(
# fpaths=dict(video_data=world_video_file, timestamps=world_time_file),
# variable_names=None,
# )
# print("\n\nRunning Validation marker detection \n\n")
# # Run marker detection
# val_ref_list = vmp.utils.batch_run(
# fn_marker,
# inputs_marker,
# batch_size=batch_size_marker,
# batch_combine_fn=vmp.utils.list_reduce,
# scale=0.5,
# progress_bar=progress_bar,
# )
# print(val_ref_list)
# np.savez(val_ref_file, val_ref_list)
# val_ref_file = string_name.format(step="validation_ref_pos")
# # Get arrays instead of dicts
# val_ref_arrays = gaze_utils.dictlist_to_arraydict(val_ref_list)
# # Save calibration markers
# np.savez(val_ref_file, **val_ref_arrays)
#
# # (4) Append left and right pupil lists
# # Append the two pupil lists (list of dicts compatible with pupil notation)
# # And then pass the appended list to the calibration routine
#
# pupil_list_binocular = []
# pupil_list_binocular.extend(pupil_list_left)
# pupil_list_binocular.extend(pupil_list_right)
#
# # Get arrays instead of list of dicts
# pupil_arrays_binocular = gaze_utils.dictlist_to_arraydict(pupil_list_binocular)
# pupil_arrays_right = gaze_utils.dictlist_to_arraydict(pupil_list_right)
# pupil_arrays_left = gaze_utils.dictlist_to_arraydict(pupil_list_left)
#
# print("\n\nAppending left and right pupil positions")
# print("left:{} right:{} binocular:{} \n\n".format(len(pupil_list_left), len(pupil_list_right), len(pupil_list_binocular)))
#
# # (5) Calibrate
# # Get data for pupil calibration
# print("\n\nGetting data for calibration \n\n")
# is_binocular, matched_data_binocular = calibrate_pl.get_data(pupil_list_binocular, ref_list, mode="2d")
# # Run calibration
# print("\n\nRunning 2d binocular calibration \n\n")
# method, result = calibrate_pl.calibrate_2d_binocular(
# *matched_data_binocular, frame_size=(video_vdim, video_hdim)
# )
# # (6) Map gaze to video coordinates
# # Mapper takes two inputs: normalized pupil x and y position
# print("\n\nRunning gaze mapper [binocular] \n\n")
#
# # Create mapper for gaze
# if (result):
# binocular_gaze_mapper = Binocular_Gaze_Mapper(result["args"]["params"], result["args"]["params_eye0"], result["args"]["params_eye1"])
# gaze_binocular = binocular_gaze_mapper.map_batch(pupil_list_binocular)
# # Transpose output so time is the first dimension
# # TODO: Make sure the format is consistent with the monocular gaze
# # gaze_binocular = np.vstack(gaze_binocular).T
#
# gaze_file = string_name.format(step="gaze")
# np.savez(gaze_file, gaze_binocular=gaze_binocular)
# final_result = True
# else:
# print("\n\nGaze Mapping Failed for Subject: ", session_id)
# final_result = False
# return final_result
#
#
# def pupil_2d_monocular_v02(
# video_file_name,
# session_folder,
# sname=None, # Base for each file?
# tag="pupil_2d_monocular_v02",
# output_path=None,
# batch_size_pupil="auto",
# progress_bar=tqdm.tqdm,
# properties=None,
# ):
# """
# Parameters
# ----------
# tag : A short label for the pipeline
# session_folder : string
# file path to session. Ultimately want to replace this with a session
# object from the database.
# sname : format string
# must contain {step}; if provided, tag and output_path are ignored
#
# Notes
# -----
# Ultimately, for saving files, we want the output of each step saved
# along with the function and parameters that were used to generate it.
#
# This is not the case now. Needs work.
# """
# # Deal with inputs
# if output_path is None:
# output_path = session_folder
# if sname is None:
# sname = os.path.join(output_path, "gaze_vedb", tag + "_" + video_file_name[:-4] + "_{step}.npz")
# fdir, _ = os.path.split(sname)
# if not os.path.exists(fdir):
# print("creating", fdir)
# os.makedirs(fdir)
# # (0) Get session
# # ses = vedb_store.Session(folder=session_folder)
# # (1) Pupil detection (L, R)
# fn_pupil = pupil_detection_pl.plabs_detect_pupil
#
# pupil_file_left = sname.format(step="pupilpos_left")
# if os.path.exists(pupil_file_left):
# print("Loading pupils left")
# pupil_arrays_left = {}
# dat = np.load(pupil_file_left, allow_pickle=True)
# for k in dat.keys():
# pupil_arrays_left[k] = dat[k]
# pupil_list_left = gaze_utils.arraydict_to_dictlist(pupil_arrays_left)
# print(pupil_list_left[0].keys())
# print("found pupil file for: ", pupil_file_left, "\n\n")
# else:
# # Get eye files (Left eye)
# eye_left_video_file = session_folder + video_file_name
# inputs_pupil_left = dict(
# fpaths=dict(eye_video=eye_left_video_file,),
# variable_names=None,
# )
# print("\n\nRunning pupil detection for the left eye\n\n")
# # Run pupil detection
# pupil_list_left = vmp.utils.batch_run(
# fn_pupil,
# inputs_pupil_left,
# batch_size=batch_size_pupil,
# batch_combine_fn=vmp.utils.list_reduce,
# progress_bar=progress_bar,
# id=1, # left eye # FIX ME: OPTION HERE FOR R, L, or binocular
# properties=properties,
# )
# # Get arrays instead of list of dicts
# pupil_arrays_left = gaze_utils.dictlist_to_arraydict(pupil_list_left)
# # Save pupil detection
# pupil_file_left = sname.format(step="pupilpos_left")
# np.savez(pupil_file_left, **pupil_arrays_left)
# print("\n\nSaved left pupil data into:",pupil_file_left,"\n\n")
#
| 41.44381
| 143
| 0.634755
| 2,834
| 21,758
| 4.592802
| 0.094213
| 0.021435
| 0.016979
| 0.016057
| 0.805778
| 0.782652
| 0.761063
| 0.740473
| 0.722265
| 0.714889
| 0
| 0.005325
| 0.257744
| 21,758
| 524
| 144
| 41.522901
| 0.800619
| 0.937908
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001908
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
6b74f3469f0b848d56925ae5e83a25441cebb4f4
| 22,043
|
py
|
Python
|
LabReport/Lab4/edit_post.py
|
Liu-Hong-De/Software_test
|
068bbadd7b6d369445994e16aea4289618337910
|
[
"Apache-2.0"
] | null | null | null |
LabReport/Lab4/edit_post.py
|
Liu-Hong-De/Software_test
|
068bbadd7b6d369445994e16aea4289618337910
|
[
"Apache-2.0"
] | 1
|
2022-01-21T23:39:34.000Z
|
2022-01-21T23:39:34.000Z
|
LabReport/Lab4/edit_post.py
|
Liu-Hong-De/Software_test
|
068bbadd7b6d369445994e16aea4289618337910
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import time
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from bs4 import BeautifulSoup
from selenium.webdriver.support.ui import Select
class EditPost(unittest.TestCase):
    """UI tests for editing a KeystoneJS blog post through the admin interface.

    ``setUp`` signs in with the demo account and creates a fresh post named
    ``"use selenium to edit a post"``; each test case edits that post, and
    ``tearDown`` deletes it and closes the browser.

    NOTE(review): the tests locate elements through hashed CSS class names
    (e.g. ``css-2960tt``) and deep generated XPaths tied to one specific
    KeystoneJS build -- these selectors break whenever the front end is
    rebuilt. Confirm against the deployed app before relying on them.

    This revision replaces the ``find_element_by_*`` helper methods (removed
    in Selenium 4.3) with ``find_element(By.<STRATEGY>, ...)`` and factors the
    heavily duplicated per-test boilerplate into private helpers.
    """

    # Fixed strings shared by the test cases.
    BASE_URL = "http://127.0.0.1:3000"
    POST_NAME = "use selenium to edit a post"
    EDITED_NAME = "edit OK"
    SAVE_BUTTON_CLASS = "css-2960tt"
    SAVED_MESSAGE = "Your changes have been saved successfully"
    DATE_ERROR = "PublishedDate is invalid"
    # Post-list table cells. Some selectors use tr[1] and others plain tr,
    # mirroring exactly what the original selectors matched.
    NAME_CELL = '//*[@id="react-root"]/div/main/div/div/div[3]/div/div/table/tbody/tr[1]/td[2]/a'
    STATE_CELL = '//*[@id="react-root"]/div/main/div/div/div[3]/div/div/table/tbody/tr[1]/td[3]/div'
    AUTHOR_CELL = '//*[@id="react-root"]/div/main/div/div/div[3]/div/div/table/tbody/tr/td[4]/a'
    DATE_CELL = '//*[@id="react-root"]/div/main/div/div/div[3]/div/div/table/tbody/tr/td[5]/div'
    FORM_ERROR = '//*[@id="react-root"]/div/main/div/div/div[1]/form/div[1]/div'
    SAVED_BANNER_CSS = '#react-root > div > main > div > div > div.css-1xkojxp > form > div.css-ctpeu'
    FIELD_ERROR_CSS = '#react-root > div > main > div > div > div.css-1xkojxp > form > div.css-1nqppvz > div'

    def setUp(self):
        """Sign in with the demo account and create the post under test."""
        self.driver = webdriver.Chrome()
        driver = self.driver
        driver.implicitly_wait(5)  # implicit wait: up to 5 seconds per element lookup
        driver.get(self.BASE_URL)
        # Open the sign-in form and authenticate.
        driver.find_element(By.XPATH, '//*[@id="navbar-collapse"]/ul[2]/li[2]/a').click()
        time.sleep(1)
        driver.find_element(By.NAME, "email").send_keys("demo@keystonejs.com")
        driver.find_element(By.NAME, "password").send_keys("demo")
        time.sleep(1)
        driver.find_element(By.XPATH, '//*[@id="signin-view"]/div/div[1]/div/div[2]/form/button').click()
        time.sleep(2)
        # Open the "create post" dialog and name the new post.
        driver.find_element(By.XPATH, '//*[@id="react-root"]/div/main/div/div[2]/div/div[1]/div[2]/div[1]/span/a[2]').click()
        time.sleep(1)
        driver.find_element(By.NAME, "name").send_keys(self.POST_NAME)
        time.sleep(1)
        # The create button's hashed class differs between builds; try both.
        try:
            driver.find_element(By.CLASS_NAME, "css-h629qq").click()
        except Exception:
            driver.find_element(By.CLASS_NAME, "css-nil").submit()
        time.sleep(1)
        driver.find_element(By.CLASS_NAME, "css-dmf4a8").click()
        time.sleep(2)

    # ------------------------------------------------------------------ #
    # Private helpers shared by the test cases.                          #
    # ------------------------------------------------------------------ #

    def _open_post(self):
        """Open the edit form of the post created in setUp."""
        self.driver.find_element(By.LINK_TEXT, self.POST_NAME).click()
        time.sleep(1)

    def _rename_post(self, new_name):
        """Clear the name field; type ``new_name`` unless it is empty."""
        field = self.driver.find_element(By.NAME, "name")
        field.clear()
        if new_name:
            field.send_keys(new_name)
        time.sleep(1)

    def _displayed_inputs(self):
        """Return the visible ``<input>`` elements of the edit form, in DOM order."""
        return [el for el in self.driver.find_elements(By.TAG_NAME, "input")
                if el.is_displayed()]

    def _set_select_input(self, index, value):
        """Type ``value`` into the ``index``-th visible input and confirm with ENTER.

        Index 2 is the state selector, index 3 the author selector
        (presumably stable for this build -- TODO confirm).
        """
        field = self._displayed_inputs()[index]
        field.send_keys(value)
        field.send_keys(Keys.ENTER)
        time.sleep(1)

    def _set_date(self, value):
        """Replace the published-date input (visible input #4) with ``value``."""
        field = self._displayed_inputs()[4]
        field.send_keys(Keys.CONTROL, "a")   # select existing text
        field.send_keys(Keys.BACK_SPACE)     # delete it
        field.send_keys(value)
        time.sleep(1)

    def _edit_iframe_content(self, index, text):
        """Type ``text`` into the ``index``-th rich-text iframe (0 = brief, 1 = extended)."""
        frame = self.driver.find_elements(By.TAG_NAME, "iframe")[index]
        frame.send_keys(Keys.ARROW_DOWN)
        frame.send_keys(Keys.ARROW_UP)
        frame.send_keys(text)
        time.sleep(1)

    def _save(self, nudge=False):
        """Click the Save button; ``nudge`` first sends arrow keys to scroll it into view."""
        if nudge:
            button = self.driver.find_element(By.CLASS_NAME, self.SAVE_BUTTON_CLASS)
            button.send_keys(Keys.ARROW_DOWN)
            button.send_keys(Keys.ARROW_UP)
        self.driver.find_element(By.CLASS_NAME, self.SAVE_BUTTON_CLASS).click()
        time.sleep(1)

    def _assert_saved(self):
        """Assert the success banner is shown after saving."""
        banner = self.driver.find_element(By.CLASS_NAME, "css-ctpeu")
        assert self.SAVED_MESSAGE in banner.text

    def _cell_text(self, xpath):
        """Return the text of a post-list table cell located by ``xpath``."""
        return self.driver.find_element(By.XPATH, xpath).text

    def _assert_date_rejected(self):
        """Assert the form shows the invalid-date error, then navigate back."""
        assert self.DATE_ERROR in self._cell_text(self.FORM_ERROR)
        self.driver.back()
        time.sleep(2)

    # ------------------------------------------------------------------ #
    # Test cases.                                                        #
    # ------------------------------------------------------------------ #

    def test_EditPostWithEmptyName(self):
        """Test case 1: an empty name is rejected with "Name is required"."""
        self._open_post()
        self._rename_post("")
        self._save()
        soup = BeautifulSoup(self.driver.page_source, "lxml")
        errors = soup.select(self.FIELD_ERROR_CSS)
        assert "Name is required" in errors[0].text
        self.driver.back()
        time.sleep(2)

    def test_EditPostWithName(self):
        """Test case 2: renaming the post succeeds and shows in the list."""
        self._open_post()
        self._rename_post(self.EDITED_NAME)
        self._save()
        self._assert_saved()
        self.driver.back()
        time.sleep(1)
        assert self.EDITED_NAME in self._cell_text(self.NAME_CELL)
        time.sleep(2)

    def test_EditPostWithStatePublished(self):
        """Test case 3: the state can be changed to Published."""
        self._open_post()
        self._rename_post(self.EDITED_NAME)
        self._set_select_input(2, "Published")
        self._save()
        self._assert_saved()
        self.driver.back()
        time.sleep(1)
        assert self.EDITED_NAME in self._cell_text(self.NAME_CELL)
        assert "Published" in self._cell_text(self.STATE_CELL)
        time.sleep(2)

    def test_EditPostWithStateArchived(self):
        """Test case 4: the state can be changed to Archived."""
        self._open_post()
        self._rename_post(self.EDITED_NAME)
        self._set_select_input(2, "Archived")
        self._save()
        self._assert_saved()
        self.driver.back()
        time.sleep(1)
        assert self.EDITED_NAME in self._cell_text(self.NAME_CELL)
        assert "Archived" in self._cell_text(self.STATE_CELL)
        time.sleep(2)

    def test_EditPostWithAuthor(self):
        """Test case 5: the author can be set to Demo User."""
        self._open_post()
        self._rename_post(self.EDITED_NAME)
        self._set_select_input(3, "Demo User")
        self._save()
        self._assert_saved()
        self.driver.back()
        time.sleep(1)
        assert self.EDITED_NAME in self._cell_text(self.NAME_CELL)
        assert "Demo User" in self._cell_text(self.AUTHOR_CELL)
        time.sleep(2)

    def _run_valid_date_case(self, raw_date):
        """Shared body for date formats that should parse to June 3rd 2020."""
        self._open_post()
        self._rename_post(self.EDITED_NAME)
        self._set_date(raw_date)
        self._save(nudge=True)
        self._assert_saved()
        self.driver.back()
        time.sleep(1)
        assert self.EDITED_NAME in self._cell_text(self.NAME_CELL)
        assert "June 3rd 2020" in self._cell_text(self.DATE_CELL)
        time.sleep(2)

    def test_EditPostWithDate_yyyy_mmdd(self):
        """Test case 6: date "2020-0603" parses as June 3rd 2020."""
        self._run_valid_date_case("2020-0603")

    def test_EditPostWithDate_yyyymm_dd(self):
        """Test case 7 (known failure): "202006-03" should parse as
        June 3rd 2020 but the app currently shows March 1st 2020."""
        self._run_valid_date_case("202006-03")

    def test_EditPostWithDate_yyyymmdd(self):
        """Test case 8: date "20200603" parses as June 3rd 2020."""
        self._run_valid_date_case("20200603")

    def test_EditPostWithDate_yyyymdd(self):
        """Test case 9: a one-digit month ("2020603") is rejected."""
        self._open_post()
        self._rename_post(self.EDITED_NAME)
        self._set_date("2020603")
        self._save(nudge=True)
        self._assert_date_rejected()

    def test_EditPostWithDate_Big_Month(self):
        """Test case 10: month greater than 12 ("2020-60-03") is rejected."""
        self._open_post()
        self._rename_post(self.EDITED_NAME)
        self._set_date("2020-60-03")
        self._save(nudge=True)
        self._assert_date_rejected()

    def test_EditPostWithDate_Big_Day(self):
        """Test case 11: day beyond the month's range ("2020-06-99") is rejected."""
        self._open_post()
        self._rename_post(self.EDITED_NAME)
        self._set_date("2020-06-99")
        self._save(nudge=True)
        self._assert_date_rejected()

    def _run_overlong_component_case(self, raw_date):
        """Shared body for the known-failure overlong month/day cases.

        Expected: "PublishedDate is invalid"; actual behavior at the time of
        writing: "Your changes have been saved successfully". The try/finally
        guarantees the browser navigates back even when the assert fails.
        """
        self._open_post()
        self._rename_post(self.EDITED_NAME)
        self._set_date(raw_date)
        self._save(nudge=True)
        messages = self.driver.find_elements(By.CSS_SELECTOR, self.SAVED_BANNER_CSS)
        try:
            assert self.DATE_ERROR in messages[0].text
        finally:
            self.driver.back()
            time.sleep(2)

    def test_EditPostWithDate_Month_MoreThanTwoDigits(self):
        """Test case 12 (known failure): three-digit month "2020-111-03"."""
        self._run_overlong_component_case("2020-111-03")

    def test_EditPostWithDate_Day_MoreThanTwoDigits(self):
        """Test case 13 (known failure): three-digit day "2020-06-135"."""
        self._run_overlong_component_case("2020-06-135")

    def test_EditPostWithDate_Char(self):
        """Test case 14: non-numeric date text ("date") is rejected."""
        self._open_post()
        self._rename_post(self.EDITED_NAME)
        self._set_date("date")
        self._save(nudge=True)
        error = self.driver.find_element(By.CSS_SELECTOR, self.FIELD_ERROR_CSS)
        assert self.DATE_ERROR in error.text
        self.driver.back()
        time.sleep(2)

    def _run_content_case(self, frame_index, text):
        """Shared body for the rich-text content cases (brief / extended)."""
        self._open_post()
        self._rename_post(self.EDITED_NAME)
        self._edit_iframe_content(frame_index, text)
        self._save()
        self._assert_saved()
        self.driver.back()
        time.sleep(1)
        # Reopen the post and verify the text landed inside the iframe body.
        self.driver.find_element(By.LINK_TEXT, self.EDITED_NAME).click()
        time.sleep(1)
        self.driver.switch_to.frame(frame_index)
        assert text in self.driver.find_element(By.TAG_NAME, "body").text
        self.driver.back()
        time.sleep(2)

    def test_EditPostWithContentBrief(self):
        """Test case 15: text typed into the brief editor is saved."""
        self._run_content_case(0, "content brief")

    def test_EditPostWithContentExtended(self):
        """Test case 16: text typed into the extended editor is saved."""
        self._run_content_case(1, "content extended")

    def tearDown(self):
        """Delete the post created in setUp and close the browser."""
        driver = self.driver
        driver.find_element(By.XPATH, '//*[@id="react-root"]/div/main/div/div/div[2]/div/div[1]/div/div/button').click()
        time.sleep(1)
        driver.find_element(By.CLASS_NAME, "css-12yx24t").click()
        time.sleep(1)
        driver.find_element(By.CLASS_NAME, "css-rd63ky").click()
        time.sleep(1)
        driver.find_element(By.CLASS_NAME, "css-t4884").click()
        time.sleep(2)
        driver.close()
if __name__ == "__main__":
unittest.main()
| 47.712121
| 183
| 0.66334
| 3,129
| 22,043
| 4.458293
| 0.069671
| 0.100358
| 0.151111
| 0.168889
| 0.874839
| 0.866237
| 0.853405
| 0.821864
| 0.802151
| 0.799857
| 0
| 0.028084
| 0.206868
| 22,043
| 462
| 184
| 47.712121
| 0.769834
| 0.063921
| 0
| 0.794937
| 0
| 0.053165
| 0.186222
| 0.050041
| 0
| 0
| 0
| 0
| 0.078481
| 1
| 0.04557
| false
| 0.002532
| 0.017722
| 0
| 0.065823
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6ba9d7cb98756308cfd868057a5f04a30241784f
| 2,243
|
py
|
Python
|
test/test_add_and_remove_contact_from_group.py
|
yulia-baturina/python_training
|
ef29b64e284ef2a2526092c9cb474b9bb489e1d0
|
[
"Apache-2.0"
] | null | null | null |
test/test_add_and_remove_contact_from_group.py
|
yulia-baturina/python_training
|
ef29b64e284ef2a2526092c9cb474b9bb489e1d0
|
[
"Apache-2.0"
] | null | null | null |
test/test_add_and_remove_contact_from_group.py
|
yulia-baturina/python_training
|
ef29b64e284ef2a2526092c9cb474b9bb489e1d0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from model.contact import Contact
from model.group import Group
import random
def test_add_random_contact_to_random_group(app, orm):
    """Assign a random contact to a random group and verify group membership."""
    # Make sure there is at least one group and one contact to work with.
    if not orm.get_group_list():
        app.group.create(Group(name="new", header="header", footer="footer"))
    if not orm.get_contact_list():
        app.contact.create(Contact(firstname="first", lastname="last", nickname="nick",
                                   company="company", homePhone="+11111111111", email="mail@mail.com"))
    all_groups = orm.get_group_list()
    target_group = random.choice(all_groups)
    position = all_groups.index(target_group)
    picked_contact = random.choice(orm.get_contact_list())
    members_before = orm.get_contacts_in_group(target_group)
    app.contact.assign_contact_by_id_to_group(picked_contact.id, target_group.name)
    # Re-read the same group (by position) and its membership after the change.
    refreshed_group = orm.get_group_list()[position]
    members_after = orm.get_contacts_in_group(refreshed_group)
    members_before.append(picked_contact)
    assert sorted(members_before, key=Contact.id_or_max) == sorted(members_after, key=Contact.id_or_max)
def test_add_and_remove_random_contact_from_random_group(app, orm):
    """Assign a random contact to a random group, then remove it via the UI
    and check, through the ORM, that the membership shrank by exactly that
    contact."""
    # Preconditions: at least one group and one contact must exist.
    if not orm.get_group_list():
        app.group.create(Group(name="new", header="header", footer="footer"))
    if not orm.get_contact_list():
        app.contact.create(Contact(firstname="first", lastname="last", nickname="nick",
                                   company="company", homePhone="+11111111111", email="mail@mail.com"))
    all_groups = orm.get_group_list()
    target_group = random.choice(all_groups)
    target_index = all_groups.index(target_group)
    chosen_contact = random.choice(orm.get_contact_list())
    app.contact.assign_contact_by_id_to_group(chosen_contact.id, target_group.name)
    members_before = orm.get_contacts_in_group(orm.get_group_list()[target_index])
    app.group.remove_contact_by_id_from_group(chosen_contact.id)
    members_after = orm.get_contacts_in_group(orm.get_group_list()[target_index])
    # list.remove drops the first match (and raises if absent), mirroring
    # the single removal performed through the UI.
    expected = list(members_before)
    expected.remove(chosen_contact)
    assert sorted(expected, key=Contact.id_or_max) == sorted(members_after, key=Contact.id_or_max)
| 46.729167
| 119
| 0.742755
| 334
| 2,243
| 4.634731
| 0.161677
| 0.05814
| 0.135659
| 0.067829
| 0.848191
| 0.834625
| 0.834625
| 0.834625
| 0.834625
| 0.784238
| 0
| 0.01391
| 0.134641
| 2,243
| 47
| 120
| 47.723404
| 0.783617
| 0.009362
| 0
| 0.714286
| 0
| 0
| 0.054078
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 1
| 0.047619
| false
| 0
| 0.071429
| 0
| 0.119048
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d406600e53ece748c4d289e719d8fc8648a1f3b1
| 3,485
|
py
|
Python
|
market/products/migrations/0007_auto_20201218_1541.py
|
hbvj99/market-api
|
489c9433556002cb391b93cbd6486da739c2418a
|
[
"MIT"
] | 1
|
2021-08-28T05:30:40.000Z
|
2021-08-28T05:30:40.000Z
|
market/products/migrations/0007_auto_20201218_1541.py
|
hbvj99/market-api
|
489c9433556002cb391b93cbd6486da739c2418a
|
[
"MIT"
] | 1
|
2022-01-14T08:57:19.000Z
|
2022-01-14T08:57:20.000Z
|
market/products/migrations/0007_auto_20201218_1541.py
|
hbvj99/market-api
|
489c9433556002cb391b93cbd6486da739c2418a
|
[
"MIT"
] | 1
|
2022-01-11T10:14:27.000Z
|
2022-01-11T10:14:27.000Z
|
# Generated by Django 3.1.1 on 2020-12-18 15:41
import cuser.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated (Django 3.1.1): add audit columns to the products app.

    For each of Category, Comment, Product and ProductVotes this adds:
    - created_by / updated_by: nullable FKs to the user model, SET_NULL on
      user deletion (CurrentUserField is from django-cuser; presumably it
      auto-fills the requesting user -- confirm against that package's docs).
    - deleted_at: nullable timestamp (looks like a soft-delete marker --
      NOTE(review): verify against the model managers).
    """

    dependencies = [
        # FK targets below point at the swappable user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('products', '0006_auto_20201116_1718'),
    ]

    operations = [
        # --- Category audit fields ---
        migrations.AddField(
            model_name='category',
            name='created_by',
            field=cuser.fields.CurrentUserField(editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='products_category_created', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='category',
            name='deleted_at',
            field=models.DateTimeField(editable=False, null=True),
        ),
        migrations.AddField(
            model_name='category',
            name='updated_by',
            field=cuser.fields.CurrentUserField(editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='products_category_modified', to=settings.AUTH_USER_MODEL),
        ),
        # --- Comment audit fields ---
        migrations.AddField(
            model_name='comment',
            name='created_by',
            field=cuser.fields.CurrentUserField(editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='products_comment_created', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='comment',
            name='deleted_at',
            field=models.DateTimeField(editable=False, null=True),
        ),
        migrations.AddField(
            model_name='comment',
            name='updated_by',
            field=cuser.fields.CurrentUserField(editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='products_comment_modified', to=settings.AUTH_USER_MODEL),
        ),
        # --- Product audit fields ---
        migrations.AddField(
            model_name='product',
            name='created_by',
            field=cuser.fields.CurrentUserField(editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='products_product_created', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='product',
            name='deleted_at',
            field=models.DateTimeField(editable=False, null=True),
        ),
        migrations.AddField(
            model_name='product',
            name='updated_by',
            field=cuser.fields.CurrentUserField(editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='products_product_modified', to=settings.AUTH_USER_MODEL),
        ),
        # --- ProductVotes audit fields ---
        migrations.AddField(
            model_name='productvotes',
            name='created_by',
            field=cuser.fields.CurrentUserField(editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='products_productvotes_created', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='productvotes',
            name='deleted_at',
            field=models.DateTimeField(editable=False, null=True),
        ),
        migrations.AddField(
            model_name='productvotes',
            name='updated_by',
            field=cuser.fields.CurrentUserField(editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='products_productvotes_modified', to=settings.AUTH_USER_MODEL),
        ),
    ]
| 44.679487
| 197
| 0.661693
| 381
| 3,485
| 5.826772
| 0.149606
| 0.097297
| 0.124324
| 0.145946
| 0.876577
| 0.876577
| 0.827928
| 0.827928
| 0.827928
| 0.827928
| 0
| 0.011507
| 0.226973
| 3,485
| 77
| 198
| 45.25974
| 0.812546
| 0.012912
| 0
| 0.732394
| 1
| 0
| 0.13409
| 0.06719
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.056338
| 0
| 0.098592
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d426fbf42f1ca4bcb5ff310655613f040c1cdc37
| 232
|
py
|
Python
|
swyft/utils/__init__.py
|
adam-coogan/swyft
|
c54bdd9f77ddf02fda857e26640df012cbe545fc
|
[
"MIT"
] | null | null | null |
swyft/utils/__init__.py
|
adam-coogan/swyft
|
c54bdd9f77ddf02fda857e26640df012cbe545fc
|
[
"MIT"
] | null | null | null |
swyft/utils/__init__.py
|
adam-coogan/swyft
|
c54bdd9f77ddf02fda857e26640df012cbe545fc
|
[
"MIT"
] | null | null | null |
from swyft.utils.array import *
from swyft.utils.device import *
from swyft.utils.mutils import *
from swyft.utils.parameters import *
from swyft.utils.plot import *
from swyft.utils.utils import *
from swyft.utils.wmutils import *
| 29
| 36
| 0.788793
| 35
| 232
| 5.228571
| 0.285714
| 0.344262
| 0.535519
| 0.655738
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12069
| 232
| 7
| 37
| 33.142857
| 0.897059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2e0ff73729ac809e7efd19f8d2b3c0ed24d733e6
| 123
|
py
|
Python
|
code/exercises/04_ControlFlow/ex_04_02_for_before.py
|
chiachang100/LearnToCodeWithPython
|
fe16115cb3be612d5abd8ffdbd6a14a37d6b4d52
|
[
"Apache-2.0"
] | null | null | null |
code/exercises/04_ControlFlow/ex_04_02_for_before.py
|
chiachang100/LearnToCodeWithPython
|
fe16115cb3be612d5abd8ffdbd6a14a37d6b4d52
|
[
"Apache-2.0"
] | null | null | null |
code/exercises/04_ControlFlow/ex_04_02_for_before.py
|
chiachang100/LearnToCodeWithPython
|
fe16115cb3be612d5abd8ffdbd6a14a37d6b4d52
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Filename: ex_for_before.py
# Teaching exercise ("before" version): the same statement is repeated by
# hand on purpose -- the companion exercise replaces it with a for loop.
print('Hello')
print('Hello')
print('Hello')
print('Hello')
print('Hello')
| 13.666667
| 28
| 0.691057
| 18
| 123
| 4.611111
| 0.555556
| 0.60241
| 0.722892
| 0.963855
| 0.60241
| 0.60241
| 0.60241
| 0.60241
| 0
| 0
| 0
| 0
| 0.081301
| 123
| 8
| 29
| 15.375
| 0.734513
| 0.349594
| 0
| 1
| 0
| 0
| 0.320513
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
2e3dbe78d095cdb07e2d3eff292574e4df1c6d51
| 33,787
|
py
|
Python
|
lib/installed_clients/kb_uploadmethodsClient.py
|
abbyjerger/Snekmer
|
46640d8516289401258a83125483d502179f68d3
|
[
"MIT"
] | null | null | null |
lib/installed_clients/kb_uploadmethodsClient.py
|
abbyjerger/Snekmer
|
46640d8516289401258a83125483d502179f68d3
|
[
"MIT"
] | null | null | null |
lib/installed_clients/kb_uploadmethodsClient.py
|
abbyjerger/Snekmer
|
46640d8516289401258a83125483d502179f68d3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
############################################################
#
# Autogenerated by the KBase type compiler -
# any changes made here will be overwritten
#
############################################################
from __future__ import print_function
# the following is a hack to get the baseclient to import whether we're in a
# package or not. This makes pep8 unhappy hence the annotations.
try:
# baseclient and this client are in a package
from .baseclient import BaseClient as _BaseClient # @UnusedImport
except ImportError:
# no they aren't
from baseclient import BaseClient as _BaseClient # @Reimport
class kb_uploadmethods(object):
def __init__(
self, url=None, timeout=30 * 60, user_id=None,
password=None, token=None, ignore_authrc=False,
trust_all_ssl_certificates=False,
auth_svc='https://ci.kbase.us/services/auth/api/legacy/KBase/Sessions/Login',
service_ver='release',
async_job_check_time_ms=100, async_job_check_time_scale_percent=150,
async_job_check_max_time_ms=300000):
if url is None:
raise ValueError('A url is required')
self._service_ver = service_ver
self._client = _BaseClient(
url, timeout=timeout, user_id=user_id, password=password,
token=token, ignore_authrc=ignore_authrc,
trust_all_ssl_certificates=trust_all_ssl_certificates,
auth_svc=auth_svc,
async_job_check_time_ms=async_job_check_time_ms,
async_job_check_time_scale_percent=async_job_check_time_scale_percent,
async_job_check_max_time_ms=async_job_check_max_time_ms)
def upload_fastq_file(self, params, context=None):
"""
:param params: instance of type "UploadMethodParams"
(sequencing_tech: sequencing technology name: output reads file
name workspace_name: workspace name/ID of the object For files in
user's staging area: fwd_staging_file_name: single-end fastq file
name or forward/left paired-end fastq file name from user's
staging area rev_staging_file_name: reverse/right paired-end fastq
file name user's staging area For files from web: download_type:
download type for web source fastq file ('Direct Download', 'FTP',
'DropBox', 'Google Drive') fwd_file_url: single-end fastq file URL
or forward/left paired-end fastq file URL rev_file_url:
reverse/right paired-end fastq file URL urls_to_add: used for
parameter-groups. dict of {fwd_file_url, rev_file_url, name,
single_genome, interleaved, insert_size_mean and
read_orientation_outward} Optional Params: single_genome: whether
the reads are from a single genome or a metagenome. interleaved:
whether reads is interleaved insert_size_mean: mean (average)
insert length insert_size_std_dev: standard deviation of insert
lengths read_orientation_outward: whether reads in a pair point
outward) -> structure: parameter "workspace_name" of type
"workspace_name" (workspace name of the object), parameter
"fwd_staging_file_name" of type "fwd_staging_file_name" (input and
output file path/url), parameter "rev_staging_file_name" of type
"rev_staging_file_name", parameter "download_type" of type
"download_type", parameter "fwd_file_url" of type "fwd_file_url",
parameter "rev_file_url" of type "rev_file_url", parameter
"sequencing_tech" of type "sequencing_tech", parameter "name" of
type "name", parameter "urls_to_add" of type "urls_to_add" ->
structure: parameter "fwd_file_url" of type "fwd_file_url",
parameter "rev_file_url" of type "rev_file_url", parameter "name"
of type "name", parameter "single_genome" of type "single_genome",
parameter "interleaved" of type "interleaved", parameter
"insert_size_mean" of type "insert_size_mean", parameter
"insert_size_std_dev" of type "insert_size_std_dev", parameter
"read_orientation_outward" of type "read_orientation_outward",
parameter "single_genome" of type "single_genome", parameter
"interleaved" of type "interleaved", parameter "insert_size_mean"
of type "insert_size_mean", parameter "insert_size_std_dev" of
type "insert_size_std_dev", parameter "read_orientation_outward"
of type "read_orientation_outward"
:returns: instance of type "UploadMethodResult" -> structure:
parameter "obj_ref" of type "obj_ref", parameter "report_name" of
type "report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.upload_fastq_file',
[params], self._service_ver, context)
def upload_fasta_gff_file(self, params, context=None):
"""
:param params: instance of type "UploadFastaGFFMethodParams"
(Required: genome_name: output genome object name workspace_name:
workspace name/ID of the object For staging area: fasta_file:
fasta file containing assembled contigs/chromosomes gff_file: gff
file containing predicted gene models and corresponding features
Optional params: scientific_name - the scientific name of the
genome. taxon_id - the numeric ID of the NCBI taxon to which this
genome belongs. If defined, will try to link the Genome to the
specified taxonomy id in lieu of performing the lookup during
upload source: Source Of The GFF File. Default to 'User'
taxon_wsname - where the reference taxons are. Default to
'ReferenceTaxons' release: Release Or Version Of The Source Data
genetic_code: Genetic Code For The Organism type: 'Reference',
'User upload', 'Representative') -> structure: parameter
"fasta_file" of String, parameter "gff_file" of String, parameter
"genome_name" of String, parameter "workspace_name" of type
"workspace_name" (workspace name of the object), parameter
"genome_type" of String, parameter "scientific_name" of String,
parameter "source" of String, parameter "taxon_wsname" of String,
parameter "taxon_id" of String, parameter "release" of String,
parameter "genetic_code" of Long, parameter "type" of String,
parameter "generate_missing_genes" of String
:returns: instance of type "UploadFastaGFFMethodResult" -> structure:
parameter "genome_ref" of String, parameter "genome_info" of
String, parameter "report_name" of type "report_name", parameter
"report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.upload_fasta_gff_file',
[params], self._service_ver, context)
def upload_metagenome_fasta_gff_file(self, params, context=None):
"""
:param params: instance of type
"UploadMetagenomeFastaGFFMethodParams" (Required: genome_name:
output metagenome object name workspace_name: workspace name/ID of
the object For staging area: fasta_file: fasta file containing
assembled contigs/chromosomes gff_file: gff file containing
predicted gene models and corresponding features Optional params:
source: Source Of The GFF File. Default to 'User' taxon_wsname -
where the reference taxons are. Default to 'ReferenceTaxons'
taxon_id - if defined, will try to link the Genome to the
specified taxonomy id in lieu of performing the lookup during
upload release: Release Or Version Of The Source Data
genetic_code: Genetic Code For The Organism type: 'Reference',
'User upload', 'Representative') -> structure: parameter
"fasta_file" of String, parameter "gff_file" of String, parameter
"genome_name" of String, parameter "workspace_name" of type
"workspace_name" (workspace name of the object), parameter
"source" of String, parameter "taxon_wsname" of String, parameter
"taxon_id" of String, parameter "release" of String, parameter
"genetic_code" of Long, parameter "type" of String, parameter
"generate_missing_genes" of String
:returns: instance of type "UploadMetagenomeFastaGFFMethodResult" ->
structure: parameter "metagenome_ref" of String, parameter
"metagenome_info" of String, parameter "report_name" of type
"report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.upload_metagenome_fasta_gff_file',
[params], self._service_ver, context)
def batch_import_genomes_from_staging(self, params, context=None):
"""
:param params: instance of type "BatchGenomeImporterParams" ->
structure: parameter "staging_subdir" of String, parameter
"genome_set_name" of String, parameter "workspace_name" of type
"workspace_name" (workspace name of the object), parameter
"genome_type" of String, parameter "source" of String, parameter
"taxon_wsname" of String, parameter "taxon_id" of String,
parameter "release" of String, parameter "genetic_code" of Long,
parameter "generate_missing_genes" of String
:returns: instance of type "BatchImporterResult" -> structure:
parameter "set_ref" of String, parameter "report_name" of type
"report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.batch_import_genomes_from_staging',
[params], self._service_ver, context)
def batch_import_assemblies_from_staging(self, params, context=None):
"""
:param params: instance of type "BatchAssemblyImporterParams" ->
structure: parameter "staging_subdir" of String, parameter
"assembly_set_name" of String, parameter "workspace_name" of type
"workspace_name" (workspace name of the object), parameter
"min_contig_length" of Long, parameter "type" of String
:returns: instance of type "BatchImporterResult" -> structure:
parameter "set_ref" of String, parameter "report_name" of type
"report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.batch_import_assemblies_from_staging',
[params], self._service_ver, context)
def unpack_staging_file(self, params, context=None):
"""
Unpack a staging area file
:param params: instance of type "UnpackStagingFileParams" (Input
parameters for the "unpack_staging_file" function. Required
parameters: staging_file_subdir_path: subdirectory file path e.g.
for file: /data/bulk/user_name/file_name staging_file_subdir_path
is file_name for file:
/data/bulk/user_name/subdir_1/subdir_2/file_name
staging_file_subdir_path is subdir_1/subdir_2/file_name
workspace_name: workspace name/ID of the object) -> structure:
parameter "workspace_name" of type "workspace_name" (workspace
name of the object), parameter "staging_file_subdir_path" of String
:returns: instance of type "UnpackStagingFileOutput" (Results from
the unpack_staging_file function. unpacked_file_path: unpacked
file path(s) in staging area) -> structure: parameter
"unpacked_file_path" of String
"""
return self._client.run_job('kb_uploadmethods.unpack_staging_file',
[params], self._service_ver, context)
def unpack_web_file(self, params, context=None):
"""
Download and unpack a web file to staging area
:param params: instance of type "UnpackWebFileParams" (Input
parameters for the "unpack_web_file" function. Required
parameters: workspace_name: workspace name/ID of the object
file_url: file URL download_type: one of ['Direct Download',
'FTP', 'DropBox', 'Google Drive'] Optional:
urls_to_add_web_unpack: used for parameter-groups. dict of
{file_url}) -> structure: parameter "workspace_name" of type
"workspace_name" (workspace name of the object), parameter
"file_url" of String, parameter "download_type" of String,
parameter "urls_to_add_web_unpack" of type
"urls_to_add_web_unpack" -> structure: parameter "file_url" of
String
:returns: instance of type "UnpackWebFileOutput" (Results from the
unpack_web_file function. unpacked_file_path: unpacked file
path(s) in staging area) -> structure: parameter
"unpacked_file_path" of String
"""
return self._client.run_job('kb_uploadmethods.unpack_web_file',
[params], self._service_ver, context)
def import_genbank_from_staging(self, params, context=None):
"""
:param params: instance of type "GenbankToGenomeParams"
(import_genbank_from_staging: wrapper method for
GenomeFileUtil.genbank_to_genome required params:
staging_file_subdir_path - subdirectory file path e.g. for file:
/data/bulk/user_name/file_name staging_file_subdir_path is
file_name for file:
/data/bulk/user_name/subdir_1/subdir_2/file_name
staging_file_subdir_path is subdir_1/subdir_2/file_name
genome_name - becomes the name of the object workspace_name - the
name of the workspace it gets saved to. source - Source of the
file typically something like RefSeq or Ensembl optional params:
scientific_name - the scientific name of the genome. taxon_id -
the numeric ID of the NCBI taxon to which this genome belongs. If
defined, will try to link the Genome to the specified taxonomy id
in lieu of performing the lookup during upload release - Release
or version number of the data per example Ensembl has numbered
releases of all their data: Release 31 generate_ids_if_needed - If
field used for feature id is not there, generate ids (default
behavior is raising an exception) generate_missing_genes -
Generate gene feature for CDSs that do not have a parent in file
genetic_code - Genetic code of organism. Overwrites determined GC
from taxon object type - Reference, Representative or User upload)
-> structure: parameter "staging_file_subdir_path" of String,
parameter "genome_name" of String, parameter "workspace_name" of
String, parameter "source" of String, parameter "genome_type" of
String, parameter "release" of String, parameter "genetic_code" of
Long, parameter "type" of String, parameter "scientific_name" of
String, parameter "taxon_id" of String, parameter
"generate_ids_if_needed" of String, parameter
"generate_missing_genes" of String
:returns: instance of type "GenomeSaveResult" -> structure: parameter
"genome_ref" of String
"""
return self._client.run_job('kb_uploadmethods.import_genbank_from_staging',
[params], self._service_ver, context)
def import_sra_from_staging(self, params, context=None):
"""
:param params: instance of type "SRAToReadsParams" (required params:
staging_file_subdir_path: subdirectory file path e.g. for file:
/data/bulk/user_name/file_name staging_file_subdir_path is
file_name for file:
/data/bulk/user_name/subdir_1/subdir_2/file_name
staging_file_subdir_path is subdir_1/subdir_2/file_name
sequencing_tech: sequencing technology name: output reads file
name workspace_name: workspace name/ID of the object Optional
Params: single_genome: whether the reads are from a single genome
or a metagenome. insert_size_mean: mean (average) insert length
insert_size_std_dev: standard deviation of insert lengths
read_orientation_outward: whether reads in a pair point outward)
-> structure: parameter "staging_file_subdir_path" of String,
parameter "sequencing_tech" of type "sequencing_tech", parameter
"name" of type "name", parameter "workspace_name" of type
"workspace_name" (workspace name of the object), parameter
"single_genome" of type "single_genome", parameter
"insert_size_mean" of type "insert_size_mean", parameter
"insert_size_std_dev" of type "insert_size_std_dev", parameter
"read_orientation_outward" of type "read_orientation_outward"
:returns: instance of type "UploadMethodResult" -> structure:
parameter "obj_ref" of type "obj_ref", parameter "report_name" of
type "report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.import_sra_from_staging',
[params], self._service_ver, context)
def import_sra_from_web(self, params, context=None):
"""
:param params: instance of type "WebSRAToReadsParams" -> structure:
parameter "download_type" of String, parameter "sra_urls_to_add"
of type "sra_urls_to_add" (download_type: download type for web
source fastq file ('Direct Download', 'FTP', 'DropBox', 'Google
Drive') sra_urls_to_add: dict of SRA file URLs required params:
file_url: SRA file URL sequencing_tech: sequencing technology
name: output reads file name workspace_name: workspace name/ID of
the object Optional Params: single_genome: whether the reads are
from a single genome or a metagenome. insert_size_mean: mean
(average) insert length insert_size_std_dev: standard deviation of
insert lengths read_orientation_outward: whether reads in a pair
point outward) -> structure: parameter "file_url" of String,
parameter "sequencing_tech" of type "sequencing_tech", parameter
"name" of type "name", parameter "single_genome" of type
"single_genome", parameter "insert_size_mean" of type
"insert_size_mean", parameter "insert_size_std_dev" of type
"insert_size_std_dev", parameter "read_orientation_outward" of
type "read_orientation_outward", parameter "workspace_name" of
type "workspace_name" (workspace name of the object)
:returns: instance of type "WebSRAToReadsResult" -> structure:
parameter "obj_refs" of list of String, parameter "report_name" of
type "report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.import_sra_from_web',
[params], self._service_ver, context)
def import_fasta_as_assembly_from_staging(self, params, context=None):
"""
:param params: instance of type "FastaToAssemblyParams" (required
params: staging_file_subdir_path: subdirectory file path e.g. for
file: /data/bulk/user_name/file_name staging_file_subdir_path is
file_name for file:
/data/bulk/user_name/subdir_1/subdir_2/file_name
staging_file_subdir_path is subdir_1/subdir_2/file_name
assembly_name: output Assembly file name workspace_name: workspace
name/ID of the object) -> structure: parameter
"staging_file_subdir_path" of String, parameter "assembly_name" of
String, parameter "workspace_name" of type "workspace_name"
(workspace name of the object), parameter "min_contig_length" of
Long, parameter "type" of String
:returns: instance of type "UploadMethodResult" -> structure:
parameter "obj_ref" of type "obj_ref", parameter "report_name" of
type "report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.import_fasta_as_assembly_from_staging',
[params], self._service_ver, context)
def import_tsv_as_media_from_staging(self, params, context=None):
"""
:param params: instance of type "FileToMediaParams" (required params:
staging_file_subdir_path: subdirectory file path e.g. for file:
/data/bulk/user_name/file_name staging_file_subdir_path is
file_name for file:
/data/bulk/user_name/subdir_1/subdir_2/file_name
staging_file_subdir_path is subdir_1/subdir_2/file_name
media_name: output Media file name workspace_name: workspace
name/ID of the object) -> structure: parameter
"staging_file_subdir_path" of String, parameter "media_name" of
String, parameter "workspace_name" of type "workspace_name"
(workspace name of the object)
:returns: instance of type "UploadMethodResult" -> structure:
parameter "obj_ref" of type "obj_ref", parameter "report_name" of
type "report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.import_tsv_as_media_from_staging',
[params], self._service_ver, context)
def import_excel_as_media_from_staging(self, params, context=None):
"""
:param params: instance of type "FileToMediaParams" (required params:
staging_file_subdir_path: subdirectory file path e.g. for file:
/data/bulk/user_name/file_name staging_file_subdir_path is
file_name for file:
/data/bulk/user_name/subdir_1/subdir_2/file_name
staging_file_subdir_path is subdir_1/subdir_2/file_name
media_name: output Media file name workspace_name: workspace
name/ID of the object) -> structure: parameter
"staging_file_subdir_path" of String, parameter "media_name" of
String, parameter "workspace_name" of type "workspace_name"
(workspace name of the object)
:returns: instance of type "UploadMethodResult" -> structure:
parameter "obj_ref" of type "obj_ref", parameter "report_name" of
type "report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.import_excel_as_media_from_staging',
[params], self._service_ver, context)
def import_tsv_or_excel_as_media_from_staging(self, params, context=None):
"""
:param params: instance of type "FileToMediaParams" (required params:
staging_file_subdir_path: subdirectory file path e.g. for file:
/data/bulk/user_name/file_name staging_file_subdir_path is
file_name for file:
/data/bulk/user_name/subdir_1/subdir_2/file_name
staging_file_subdir_path is subdir_1/subdir_2/file_name
media_name: output Media file name workspace_name: workspace
name/ID of the object) -> structure: parameter
"staging_file_subdir_path" of String, parameter "media_name" of
String, parameter "workspace_name" of type "workspace_name"
(workspace name of the object)
:returns: instance of type "UploadMethodResult" -> structure:
parameter "obj_ref" of type "obj_ref", parameter "report_name" of
type "report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.import_tsv_or_excel_as_media_from_staging',
[params], self._service_ver, context)
def import_file_as_fba_model_from_staging(self, params, context=None):
"""
:param params: instance of type "FileToFBAModelParams" (required
params: model_file: subdirectory file path for model file e.g. for
file: /data/bulk/user_name/file_name staging_file_subdir_path is
file_name for file:
/data/bulk/user_name/subdir_1/subdir_2/file_name
staging_file_subdir_path is subdir_1/subdir_2/file_name
compounds_file: same as above for compound (only used for tsv)
file_type: one of "tsv", "excel", "sbml" genome: the associated
species genome biomasses: one or more biomass reactions in model
model_name: output FBAModel object name workspace_name: workspace
name/ID of the object) -> structure: parameter "model_file" of
String, parameter "compounds_file" of String, parameter
"file_type" of String, parameter "genome" of String, parameter
"biomass" of String, parameter "model_name" of String, parameter
"workspace_name" of type "workspace_name" (workspace name of the
object)
:returns: instance of type "UploadMethodResult" -> structure:
parameter "obj_ref" of type "obj_ref", parameter "report_name" of
type "report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.import_file_as_fba_model_from_staging',
[params], self._service_ver, context)
def import_tsv_as_expression_matrix_from_staging(self, params, context=None):
"""
:param params: instance of type "FileToMatrixParams" (required
params: staging_file_subdir_path: subdirectory file path e.g. for
file: /data/bulk/user_name/file_name staging_file_subdir_path is
file_name for file:
/data/bulk/user_name/subdir_1/subdir_2/file_name
staging_file_subdir_path is subdir_1/subdir_2/file_name
matrix_name: output Expressin Matirx file name workspace_name:
workspace name/ID of the object genome_ref: optional reference to
a Genome object that will be used for mapping feature IDs to
fill_missing_values: optional flag for filling in missing values
in matrix (default value is false) data_type: optional filed,
value is one of 'untransformed', 'log2_level', 'log10_level',
'log2_ratio', 'log10_ratio' or 'unknown' (last one is default
value) data_scale: optional parameter (default value is '1.0')) ->
structure: parameter "staging_file_subdir_path" of String,
parameter "workspace_name" of type "workspace_name" (workspace
name of the object), parameter "matrix_name" of String, parameter
"genome_ref" of String, parameter "fill_missing_values" of type
"boolean" (Indicates true or false values, false = 0, true = 1
@range [0,1]), parameter "data_type" of String, parameter
"data_scale" of String
:returns: instance of type "UploadMethodResult" -> structure:
parameter "obj_ref" of type "obj_ref", parameter "report_name" of
type "report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.import_tsv_as_expression_matrix_from_staging',
[params], self._service_ver, context)
def import_reads_from_staging(self, params, context=None):
"""
:param params: instance of type "UploadReadsParams" (sequencing_tech:
sequencing technology name: output reads file name workspace_name:
workspace name/ID of the object import_type: either FASTQ or SRA
For files in user's staging area:
fastq_fwd_or_sra_staging_file_name: single-end fastq file name Or
forward/left paired-end fastq file name from user's staging area
Or SRA staging file fastq_rev_staging_file_name: reverse/right
paired-end fastq file name user's staging area e.g. for file:
/data/bulk/user_name/file_name staging_file_subdir_path is
file_name for file:
/data/bulk/user_name/subdir_1/subdir_2/file_name
staging_file_subdir_path is subdir_1/subdir_2/file_name Optional
Params: single_genome: whether the reads are from a single genome
or a metagenome. interleaved: whether reads is interleaved
insert_size_mean: mean (average) insert length
insert_size_std_dev: standard deviation of insert lengths
read_orientation_outward: whether reads in a pair point outward)
-> structure: parameter "import_type" of String, parameter
"fastq_fwd_staging_file_name" of String, parameter
"fastq_rev_staging_file_name" of String, parameter
"sra_staging_file_name" of String, parameter "sequencing_tech" of
type "sequencing_tech", parameter "workspace_name" of type
"workspace_name" (workspace name of the object), parameter "name"
of String, parameter "single_genome" of type "single_genome",
parameter "interleaved" of type "interleaved", parameter
"insert_size_mean" of type "insert_size_mean", parameter
"insert_size_std_dev" of type "insert_size_std_dev", parameter
"read_orientation_outward" of type "read_orientation_outward"
:returns: instance of type "UploadMethodResult" -> structure:
parameter "obj_ref" of type "obj_ref", parameter "report_name" of
type "report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.import_reads_from_staging',
[params], self._service_ver, context)
def import_tsv_as_phenotype_set_from_staging(self, params, context=None):
"""
:param params: instance of type "FileToPhenotypeSetParams" (required
params: staging_file_subdir_path: subdirectory file path e.g. for
file: /data/bulk/user_name/file_name staging_file_subdir_path is
file_name for file:
/data/bulk/user_name/subdir_1/subdir_2/file_name
staging_file_subdir_path is subdir_1/subdir_2/file_name
phenotype_set_name: output PhenotypeSet object name
workspace_name: workspace name/ID of the object optional: genome:
Genome object that contains features referenced by the Phenotype
Set) -> structure: parameter "staging_file_subdir_path" of String,
parameter "workspace_name" of type "workspace_name" (workspace
name of the object), parameter "phenotype_set_name" of String,
parameter "genome" of type "obj_ref"
:returns: instance of type "UploadMethodResult" -> structure:
parameter "obj_ref" of type "obj_ref", parameter "report_name" of
type "report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.import_tsv_as_phenotype_set_from_staging',
[params], self._service_ver, context)
def import_attribute_mapping_from_staging(self, params, context=None):
"""
:param params: instance of type "FileToConditionSetParams" (required
params: staging_file_subdir_path: subdirectory file path e.g. for
file: /data/bulk/user_name/file_name staging_file_subdir_path is
file_name for file:
/data/bulk/user_name/subdir_1/subdir_2/file_name
staging_file_subdir_path is subdir_1/subdir_2/file_name
attribute_mapping_name: output ConditionSet object name
workspace_id: workspace name/ID of the object) -> structure:
parameter "staging_file_subdir_path" of String, parameter
"workspace_name" of type "workspace_name" (workspace name of the
object), parameter "attribute_mapping_name" of String
:returns: instance of type "UploadMethodResult" -> structure:
parameter "obj_ref" of type "obj_ref", parameter "report_name" of
type "report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.import_attribute_mapping_from_staging',
[params], self._service_ver, context)
def import_eschermap_from_staging(self, params, context=None):
"""
:param params: instance of type "EscherMapParams" -> structure:
parameter "staging_file_subdir_path" of String, parameter
"workspace_id" of Long, parameter "escher_map_name" of String
:returns: instance of type "UploadMethodResult" -> structure:
parameter "obj_ref" of type "obj_ref", parameter "report_name" of
type "report_name", parameter "report_ref" of type "report_ref"
"""
return self._client.run_job('kb_uploadmethods.import_eschermap_from_staging',
[params], self._service_ver, context)
def status(self, context=None):
return self._client.run_job('kb_uploadmethods.status',
[], self._service_ver, context)
| 61.208333
| 100
| 0.67301
| 4,207
| 33,787
| 5.139529
| 0.084859
| 0.040792
| 0.059754
| 0.043705
| 0.824207
| 0.797105
| 0.765424
| 0.744704
| 0.730737
| 0.706225
| 0
| 0.003178
| 0.254891
| 33,787
| 551
| 101
| 61.319419
| 0.85569
| 0.712197
| 0
| 0.227273
| 1
| 0.011364
| 0.169437
| 0.155075
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.022727
| 0.386364
| 0.011364
| 0.886364
| 0.011364
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
2e841bb4ca470bac5c65419e1b8fc0661b9abc21
| 112
|
py
|
Python
|
edge_impulse_linux/__init__.py
|
ShawnHymel/linux-sdk-python
|
2619ec54729a5cfa4ce217aef371e15ad2cb5fbb
|
[
"Apache-2.0"
] | 19
|
2021-04-11T13:40:50.000Z
|
2022-03-29T14:13:57.000Z
|
edge_impulse_linux/__init__.py
|
ShawnHymel/linux-sdk-python
|
2619ec54729a5cfa4ce217aef371e15ad2cb5fbb
|
[
"Apache-2.0"
] | 8
|
2021-04-18T16:39:11.000Z
|
2022-01-06T05:12:42.000Z
|
edge_impulse_linux/__init__.py
|
ShawnHymel/linux-sdk-python
|
2619ec54729a5cfa4ce217aef371e15ad2cb5fbb
|
[
"Apache-2.0"
] | 6
|
2021-04-12T17:34:04.000Z
|
2022-01-08T16:50:10.000Z
|
from edge_impulse_linux import runner
from edge_impulse_linux import audio
from edge_impulse_linux import image
| 28
| 37
| 0.892857
| 18
| 112
| 5.222222
| 0.444444
| 0.255319
| 0.478723
| 0.638298
| 0.829787
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 112
| 3
| 38
| 37.333333
| 0.94
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
cf1d7f7cd9e5a961fffc6c088b772f608f6acb6d
| 17,089
|
py
|
Python
|
employer_engagement/training/levy_train_sql_functions.py
|
SkillsFundingAgency/das-data-management-ai
|
8b517a62f9d78b2af363634c5420e92a9faac03d
|
[
"MIT"
] | null | null | null |
employer_engagement/training/levy_train_sql_functions.py
|
SkillsFundingAgency/das-data-management-ai
|
8b517a62f9d78b2af363634c5420e92a9faac03d
|
[
"MIT"
] | null | null | null |
employer_engagement/training/levy_train_sql_functions.py
|
SkillsFundingAgency/das-data-management-ai
|
8b517a62f9d78b2af363634c5420e92a9faac03d
|
[
"MIT"
] | null | null | null |
from azureml.core import Workspace
from azureml.core.compute import ComputeTarget
from azureml.pipeline.steps import PythonScriptStep
from azureml.pipeline.core import Pipeline, PipelineData, StepSequence, PublishedPipeline
from azureml.core.runconfig import RunConfiguration
from azureml.pipeline.core import PipelineEndpoint
import azureml.core
import os
from azureml.data.datapath import DataPath
from azureml.core import Workspace, Datastore, Dataset, ComputeTarget, Experiment, ScriptRunConfig, Environment, Model
from azureml.core.run import Run
# Set up config of workspace and datastore:
# resolve the Azure ML workspace from the current run context (this module is
# assumed to be imported inside an Azure ML run -- Run.get_context() would
# return an offline run otherwise; TODO confirm) and bind the 'datamgmtdb'
# datastore used by every query function below.
aml_workspace = Run.get_context().experiment.workspace
datastore = Datastore.get(aml_workspace, datastore_name='datamgmtdb')
def levy_train_01_accounts(top_x: str) :
    """Pull a balanced random sample of levy accounts for model training.

    Builds a UNION of random samples (``order by rand()``) of up to ``top_x``
    rows per (year bucket, a1 flag) combination covering creation years 2017
    through 2022, flagging accounts created before 2017-07-01 as
    ``early_adopter``, then executes the query against the module-level
    ``datastore`` and returns the result as a pandas DataFrame.

    :param top_x: per-subgroup row cap, substituted into all twelve ``{}``
        slots of the SQL template via str.format.  NOTE(review): the value is
        interpolated directly into SQL -- assumed to always be a trusted
        numeric string; confirm callers never pass user-controlled input.
    :returns: pandas DataFrame with columns A1, A2, A3, early_adopter.
    """
    query_levy_accounts = DataPath(datastore, "select A1, A2, A3, early_adopter from \
(select top {} A1, A2, A3, CASE WHEN CAST(A2 AS DATE)<cast('2017-07-01' as date) THEN 1 ELSE 0 END AS early_adopter from PDS_AI.PT_A \
where a2 between '2017-01-01' and '2018-01-01' and a1=1 \
order by rand()) a \
union \
select A1, A2, A3, early_adopter from \
(select top {} A1, A2, A3, CASE WHEN CAST(A2 AS DATE)<cast('2017-07-01' as date) THEN 1 ELSE 0 END AS early_adopter \
from PDS_AI.PT_A where a2 between '2017-01-01' and '2018-01-01' and a1=0 order by rand()) b \
union \
select A1, A2, A3, early_adopter from \
(select top {} A1, A2, A3, CASE WHEN CAST(A2 AS DATE)<cast('2017-07-01' as date) THEN 1 ELSE 0 END AS early_adopter \
from PDS_AI.PT_A \
where a2 between '2018-01-01' and '2019-01-01' and a1=1 \
order by rand()) c \
union \
select A1, A2, A3, early_adopter \
from \
(select top {} A1, A2, A3, CASE WHEN CAST(A2 AS DATE)<cast('2017-07-01' as date) THEN 1 ELSE 0 END AS early_adopter \
from PDS_AI.PT_A \
where a2 between '2018-01-01' and '2019-01-01' and a1=0 \
order by rand()) c \
union \
select A1, A2, A3, early_adopter from \
(select top {} A1, A2, A3, CASE WHEN CAST(A2 AS DATE)<cast('2017-07-01' as date) THEN 1 ELSE 0 END AS early_adopter \
from PDS_AI.PT_A \
where a2 between '2019-01-01' and '2020-01-01' and a1=1 \
order by rand()) c \
union \
select A1, A2, A3, early_adopter \
from \
(select top {} A1, A2, A3, CASE WHEN CAST(A2 AS DATE)<cast('2017-07-01' as date) THEN 1 ELSE 0 END AS early_adopter \
from PDS_AI.PT_A \
where a2 between '2019-01-01' and '2020-01-01' and a1=0 \
order by rand()) c \
union \
select A1, A2, A3, early_adopter \
from \
(select top {} A1, A2, A3, CASE WHEN CAST(A2 AS DATE)<cast('2017-07-01' as date) THEN 1 ELSE 0 END AS early_adopter \
from PDS_AI.PT_A \
where a2 between '2020-01-01' and '2021-01-01' and a1=1 \
order by rand()) c \
union \
select A1, A2, A3, early_adopter \
from \
(select top {} A1, A2, A3, CASE WHEN CAST(A2 AS DATE)<cast('2017-07-01' as date) THEN 1 ELSE 0 END AS early_adopter \
from PDS_AI.PT_A \
where a2 between '2020-01-01' and '2021-01-01' and a1=0 \
order by rand()) c \
union \
select A1, A2, A3, early_adopter \
from \
(select top {} A1, A2, A3, CASE WHEN CAST(A2 AS DATE)<cast('2017-07-01' as date) THEN 1 ELSE 0 END AS early_adopter \
from PDS_AI.PT_A \
where a2 between '2021-01-01' and '2022-01-01' and a1=1 \
order by rand()) c \
union \
select A1, A2, A3, early_adopter \
from \
(select top {} A1, A2, A3, CASE WHEN CAST(A2 AS DATE)<cast('2017-07-01' as date) THEN 1 ELSE 0 END AS early_adopter \
from PDS_AI.PT_A \
where a2 between '2021-01-01' and '2022-01-01' and a1=0 \
order by rand()) c \
union \
select A1, A2, A3, early_adopter \
from \
(select top {} A1, A2, A3, CASE WHEN CAST(A2 AS DATE)<cast('2017-07-01' as date) THEN 1 ELSE 0 END AS early_adopter \
from PDS_AI.PT_A \
where a2 between '2022-01-01' and '2023-01-01' and a1=1 \
order by rand()) c \
union \
select A1, A2, A3, early_adopter \
from \
(select top {} A1, A2, A3, CASE WHEN CAST(A2 AS DATE)<cast('2017-07-01' as date) THEN 1 ELSE 0 END AS early_adopter \
from PDS_AI.PT_A \
where a2 between '2022-01-01' and '2023-01-01' and a1=0 \
order by rand()) c".format(top_x,top_x,top_x,top_x,top_x,top_x,top_x,top_x,top_x,top_x,top_x,top_x))
    # Execute on the datastore (1h timeout) and materialise locally.
    tabular_levy_accounts = Dataset.Tabular.from_sql_query(query_levy_accounts, query_timeout=3600)
    levy_model_accounts = tabular_levy_accounts.to_pandas_dataframe()
    return levy_model_accounts
def levy_train_02_levy_model_set_2018_2019_part1(sql_account_list: str) :
    """Build part 1 of the 2018/2019 levy training set (cohort '2019').

    For accounts created before 2018-04-01 (A1=1) and listed in
    ``sql_account_list``, joins total commitments for Apr 2018 - Apr 2019 and
    the previous 12 months' commitment/occupation-mix features, and returns
    the result as a pandas DataFrame.

    :param sql_account_list: comma-separated account ID list interpolated into
        every ``IN ({0})`` clause.  NOTE(review): interpolated directly into
        SQL -- assumed to be built from trusted IDs; confirm it is never
        user-controlled.
    :returns: pandas DataFrame of per-account features.
    """
    # BUGFIX: the occupation_null column previously used "B6 = NULL", which
    # is always false in T-SQL (NULL compares unknown), so occupation_null
    # was constantly 0.  It now uses "B6 IS NULL".
    query_2018_2019_part1 = DataPath(datastore, "SELECT A3 \
, '2019' as cohort \
, total_commitments \
, occupation_1 \
, occupation_2 \
, occupation_3 \
, occupation_7 \
, occupation_13 \
, occupation_14 \
, occupation_15 \
, occupation_17 \
, occupation_20 \
, occupation_22 \
, occupation_24 \
, occupation_null \
, prev_12m_new_commitments \
, prev_12m_new_levy_transfers \
, A7 as levy_sending_company \
FROM \
(SELECT A3, CONCAT(YEAR(A2),'-',month(A2)) as yearmon_created, A1 as levy_split, A2, A7 \
FROM PDS_AI.PT_A \
WHERE A2<'2018-04-01' AND A1=1 AND A3 in ({0}) \
) A \
LEFT JOIN \
(SELECT B10, count(*) AS total_commitments \
FROM PDS_AI.PT_B \
WHERE cast(B2 as date) >= '2018-04-01' AND cast(B2 as date) < '2019-04-01' AND B10 in ({0}) \
GROUP BY B10 \
) B \
ON A.A3=B.B10 \
LEFT JOIN \
(SELECT B10 \
, COUNT(*) AS prev_12m_new_commitments \
, SUM(CASE WHEN B12=1 THEN 1 ELSE 0 END) AS prev_12m_new_levy_transfers \
, CAST(SUM(CASE WHEN B6 = '1' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_1 \
, CAST(SUM(CASE WHEN B6 = '2' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_2 \
, CAST(SUM(CASE WHEN B6 = '3' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_3 \
, CAST(SUM(CASE WHEN B6 = '7' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_7 \
, CAST(SUM(CASE WHEN B6 = '13' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_13 \
, CAST(SUM(CASE WHEN B6 = '14' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_14 \
, CAST(SUM(CASE WHEN B6 = '15' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_15 \
, CAST(SUM(CASE WHEN B6 = '17' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_17 \
, CAST(SUM(CASE WHEN B6 = '20' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_20 \
, CAST(SUM(CASE WHEN B6 = '22' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_22 \
, CAST(SUM(CASE WHEN B6 = '24' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_24 \
, CAST(SUM(CASE WHEN B6 IS NULL THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_null \
FROM PDS_AI.PT_B \
WHERE cast(B2 as date) >= '2017-04-01' AND cast(B2 as date) < '2018-04-01' AND B10 in ({0}) \
GROUP BY B10 \
) C \
ON A.A3=C.B10".format(sql_account_list))
    # Execute on the datastore (1h timeout) and materialise locally.
    tabular_2018_2019_part1 = Dataset.Tabular.from_sql_query(query_2018_2019_part1, query_timeout=3600)
    levy_model_set_2018_2019_part1 = tabular_2018_2019_part1.to_pandas_dataframe()
    return levy_model_set_2018_2019_part1
def levy_train_03_levy_model_set_2018_2019_part2(sql_account_list: str) :
    """Build part 2 of the 2018/2019 levy training set.

    For accounts created before 2018-04-01 (A1=1) and listed in
    ``sql_account_list``, joins the count of commitments ending within Apr
    2018 - Apr 2019 and the count of commitments still live at 2018-04-01,
    returning the result as a pandas DataFrame.

    :param sql_account_list: comma-separated account ID list interpolated into
        every ``IN ({0})`` clause.  NOTE(review): interpolated directly into
        SQL -- assumed trusted; confirm it is never user-controlled.
    :returns: pandas DataFrame with A3, commitments_ending_12m,
        current_live_commitments.
    """
    query_2018_2019_part2 = DataPath(datastore, "SELECT A3 \
, commitments_ending_12m \
, current_live_commitments \
FROM \
(SELECT A3, CONCAT(YEAR(A2),'-',month(A2)) as yearmon_created, A1 as levy_split, A2, A7 \
FROM PDS_AI.PT_A \
WHERE A2<'2018-04-01' and A1=1 AND A3 in ({0}) \
) A \
LEFT JOIN \
(SELECT B10 \
, COUNT(*) AS commitments_ending_12m \
FROM PDS_AI.PT_B \
WHERE CAST(B17 AS DATE) < '2019-04-01' AND CAST(B17 AS DATE)>='2018-04-01' \
AND (CAST(B20 AS DATE) >= '2018-04-01' OR B20 IS NULL) \
AND (CAST(B16 AS DATE) >= '2018-04-01' OR B16 IS NULL) \
AND B10 in ({0}) \
GROUP BY B10 \
) D \
ON A.A3=D.B10 \
LEFT JOIN \
(SELECT B10 \
, COUNT(*) AS current_live_commitments \
FROM PDS_AI.PT_B \
WHERE cast(B2 AS DATE) < '2018-04-01' AND \
(B20 IS NULL OR CAST(B20 AS DATE)>='2018-04-01') AND \
(B16 IS NULL OR CAST(B16 AS DATE)>='2018-04-01') \
AND B10 in ({0}) \
GROUP BY B10 \
) E \
ON A.A3=E.B10".format(sql_account_list))
    # Execute on the datastore (1h timeout) and materialise locally.
    tabular_2018_2019_part2 = Dataset.Tabular.from_sql_query(query_2018_2019_part2, query_timeout=3600)
    levy_model_set_2018_2019_part2 = tabular_2018_2019_part2.to_pandas_dataframe()
    return levy_model_set_2018_2019_part2
def levy_train_04_levy_model_set_2019_2020_part1(sql_account_list: str) :
    """Build part 1 of the 2019/2020 levy training set (cohort '2020').

    Same feature construction as levy_train_02 shifted one year forward:
    accounts created before 2019-04-01 (A1=1), total commitments for Apr 2019
    - Apr 2020, and the previous 12 months' commitment/occupation-mix
    features, returned as a pandas DataFrame.

    :param sql_account_list: comma-separated account ID list interpolated into
        every ``IN ({0})`` clause.  NOTE(review): interpolated directly into
        SQL -- assumed trusted; confirm it is never user-controlled.
    :returns: pandas DataFrame of per-account features.
    """
    # BUGFIX: occupation_null previously used "B6 = NULL" (always false in
    # T-SQL); corrected to "B6 IS NULL" so the null-occupation share is real.
    query_2019_2020_part1 = DataPath(datastore, "SELECT A3 \
, '2020' as cohort \
, total_commitments \
, occupation_1 \
, occupation_2 \
, occupation_3 \
, occupation_7 \
, occupation_13 \
, occupation_14 \
, occupation_15 \
, occupation_17 \
, occupation_20 \
, occupation_22 \
, occupation_24 \
, occupation_null \
, prev_12m_new_commitments \
, prev_12m_new_levy_transfers \
, A7 as levy_sending_company \
FROM \
(SELECT A3, CONCAT(YEAR(A2),'-',month(A2)) as yearmon_created, A1 as levy_split, A2, A7 \
FROM PDS_AI.PT_A \
WHERE A2<'2019-04-01' AND A1=1 AND A3 in ({0}) \
) A \
LEFT JOIN \
(SELECT B10, count(*) AS total_commitments \
FROM PDS_AI.PT_B \
WHERE cast(B2 as date) >= '2019-04-01' AND cast(B2 as date) < '2020-04-01' AND B10 in ({0}) \
GROUP BY B10 \
) B \
ON A.A3=B.B10 \
LEFT JOIN \
(SELECT B10 \
, COUNT(*) AS prev_12m_new_commitments \
, SUM(CASE WHEN B12=1 THEN 1 ELSE 0 END) AS prev_12m_new_levy_transfers \
, CAST(SUM(CASE WHEN B6 = '1' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_1 \
, CAST(SUM(CASE WHEN B6 = '2' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_2 \
, CAST(SUM(CASE WHEN B6 = '3' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_3 \
, CAST(SUM(CASE WHEN B6 = '7' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_7 \
, CAST(SUM(CASE WHEN B6 = '13' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_13 \
, CAST(SUM(CASE WHEN B6 = '14' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_14 \
, CAST(SUM(CASE WHEN B6 = '15' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_15 \
, CAST(SUM(CASE WHEN B6 = '17' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_17 \
, CAST(SUM(CASE WHEN B6 = '20' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_20 \
, CAST(SUM(CASE WHEN B6 = '22' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_22 \
, CAST(SUM(CASE WHEN B6 = '24' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_24 \
, CAST(SUM(CASE WHEN B6 IS NULL THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_null \
FROM PDS_AI.PT_B \
WHERE cast(B2 as date) >= '2018-04-01' AND cast(B2 as date) < '2019-04-01' AND B10 in ({0}) \
GROUP BY B10 \
) C \
ON A.A3=C.B10".format(sql_account_list))
    # Execute on the datastore (1h timeout) and materialise locally.
    tabular_2019_2020_part1 = Dataset.Tabular.from_sql_query(query_2019_2020_part1, query_timeout=3600)
    levy_model_set_2019_2020_part1 = tabular_2019_2020_part1.to_pandas_dataframe()
    return levy_model_set_2019_2020_part1
def levy_train_05_levy_model_set_2019_2020_part2(sql_account_list: str) :
    """Build part 2 of the 2019/2020 levy training set.

    Same construction as levy_train_03 shifted one year forward: commitments
    ending within Apr 2019 - Apr 2020 and commitments still live at
    2019-04-01, for accounts created before 2019-04-01 (A1=1).

    :param sql_account_list: comma-separated account ID list interpolated into
        every ``IN ({0})`` clause.  NOTE(review): interpolated directly into
        SQL -- assumed trusted; confirm it is never user-controlled.
    :returns: pandas DataFrame with A3, commitments_ending_12m,
        current_live_commitments.
    """
    query_2019_2020_part2 = DataPath(datastore, "SELECT A3 \
, commitments_ending_12m \
, current_live_commitments \
FROM \
(SELECT A3, CONCAT(YEAR(A2),'-',month(A2)) as yearmon_created, A1 as levy_split, A2, A7 \
FROM PDS_AI.PT_A \
WHERE A2<'2019-04-01' AND A1=1 AND A3 in ({0}) \
) A \
LEFT JOIN \
(SELECT B10 \
, COUNT(*) AS commitments_ending_12m \
FROM PDS_AI.PT_B \
WHERE CAST(B17 AS DATE) < '2020-04-01' AND CAST(B17 AS DATE)>='2019-04-01' \
AND (CAST(B20 AS DATE) >= '2019-04-01' OR B20 IS NULL) \
AND (CAST(B16 AS DATE) >= '2019-04-01' OR B16 IS NULL) \
AND B10 in ({0}) \
GROUP BY B10 \
) D \
ON A.A3=D.B10 \
LEFT JOIN \
(SELECT B10 \
, COUNT(*) AS current_live_commitments \
FROM PDS_AI.PT_B \
WHERE cast(B2 AS DATE) < '2019-04-01' AND \
(B20 IS NULL OR CAST(B20 AS DATE)>='2019-04-01') AND \
(B16 IS NULL OR CAST(B16 AS DATE)>='2019-04-01') \
AND B10 in ({0}) \
GROUP BY B10 \
) E \
ON A.A3=E.B10".format(sql_account_list))
    # Execute on the datastore (1h timeout) and materialise locally.
    tabular_2019_2020_part2 = Dataset.Tabular.from_sql_query(query_2019_2020_part2, query_timeout=3600)
    levy_model_set_2019_2020_part2 = tabular_2019_2020_part2.to_pandas_dataframe()
    return levy_model_set_2019_2020_part2
def levy_train_06_levy_model_set_2022_part1(sql_account_list: str) :
    """Build part 1 of the 2022 levy training set (cohort '2022').

    Same feature construction as levy_train_02/04 on calendar-year windows:
    accounts created before 2021-01-01 (A1=1), total commitments for 2021,
    and the previous 12 months' (2020) commitment/occupation-mix features,
    returned as a pandas DataFrame.

    :param sql_account_list: comma-separated account ID list interpolated into
        every ``IN ({0})`` clause.  NOTE(review): interpolated directly into
        SQL -- assumed trusted; confirm it is never user-controlled.
    :returns: pandas DataFrame of per-account features.
    """
    # BUGFIX: occupation_null previously used "B6 = NULL" (always false in
    # T-SQL); corrected to "B6 IS NULL" so the null-occupation share is real.
    query_2022_part1 = DataPath(datastore, "SELECT A3 \
, '2022' as cohort \
, total_commitments \
, occupation_1 \
, occupation_2 \
, occupation_3 \
, occupation_7 \
, occupation_13 \
, occupation_14 \
, occupation_15 \
, occupation_17 \
, occupation_20 \
, occupation_22 \
, occupation_24 \
, occupation_null \
, prev_12m_new_commitments \
, prev_12m_new_levy_transfers \
, A7 as levy_sending_company \
FROM \
(SELECT A3, CONCAT(YEAR(A2),'-',month(A2)) as yearmon_created, A1 as levy_split, A2, A7 \
FROM PDS_AI.PT_A \
WHERE A2<'2021-01-01' AND A1=1 AND A3 in ({0}) \
) A \
LEFT JOIN \
(SELECT B10, count(*) AS total_commitments \
FROM PDS_AI.PT_B \
WHERE cast(B2 as date) >= '2021-01-01' AND cast(B2 as date) < '2022-01-01' AND B10 in ({0}) \
GROUP BY B10 \
) B \
ON A.A3=B.B10 \
LEFT JOIN \
(SELECT B10 \
, COUNT(*) AS prev_12m_new_commitments \
, SUM(CASE WHEN B12=1 THEN 1 ELSE 0 END) AS prev_12m_new_levy_transfers \
, CAST(SUM(CASE WHEN B6 = '1' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_1 \
, CAST(SUM(CASE WHEN B6 = '2' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_2 \
, CAST(SUM(CASE WHEN B6 = '3' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_3 \
, CAST(SUM(CASE WHEN B6 = '7' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_7 \
, CAST(SUM(CASE WHEN B6 = '13' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_13 \
, CAST(SUM(CASE WHEN B6 = '14' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_14 \
, CAST(SUM(CASE WHEN B6 = '15' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_15 \
, CAST(SUM(CASE WHEN B6 = '17' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_17 \
, CAST(SUM(CASE WHEN B6 = '20' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_20 \
, CAST(SUM(CASE WHEN B6 = '22' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_22 \
, CAST(SUM(CASE WHEN B6 = '24' THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_24 \
, CAST(SUM(CASE WHEN B6 IS NULL THEN 1.000 ELSE 0 END) / COUNT(*) AS DECIMAL(10,3)) AS occupation_null \
FROM PDS_AI.PT_B \
WHERE cast(B2 as date) >= '2020-01-01' AND cast(B2 as date) < '2021-01-01' AND B10 in ({0}) \
GROUP BY B10 \
) C \
ON A.A3=C.B10".format(sql_account_list))
    # Execute on the datastore (1h timeout) and materialise locally.
    tabular_2022_part1 = Dataset.Tabular.from_sql_query(query_2022_part1, query_timeout=3600)
    levy_model_set_2022_part1 = tabular_2022_part1.to_pandas_dataframe()
    return levy_model_set_2022_part1
def levy_train_07_levy_model_set_2022_part2(sql_account_list: str) :
    """Build part 2 of the 2022 levy training set.

    Same construction as levy_train_03/05 on calendar-year windows:
    commitments ending within 2021 and commitments still live at 2021-01-01,
    for accounts created before 2021-01-01 (A1=1).

    :param sql_account_list: comma-separated account ID list interpolated into
        every ``IN ({0})`` clause.  NOTE(review): interpolated directly into
        SQL -- assumed trusted; confirm it is never user-controlled.
    :returns: pandas DataFrame with A3, commitments_ending_12m,
        current_live_commitments.
    """
    query_2022_part2 = DataPath(datastore, "SELECT A3 \
, commitments_ending_12m \
, current_live_commitments \
FROM \
(SELECT A3, CONCAT(YEAR(A2),'-',month(A2)) as yearmon_created, A1 as levy_split, A2, A7 \
FROM PDS_AI.PT_A \
WHERE A2<'2021-01-01' AND A1=1 AND A3 in ({0}) \
) A \
LEFT JOIN \
(SELECT B10 \
, COUNT(*) AS commitments_ending_12m \
FROM PDS_AI.PT_B \
WHERE cast(B17 as date) < '2022-01-01' AND CAST(B17 AS DATE)>='2021-01-01' \
AND (CAST(B20 AS DATE) >= '2021-01-01' OR B20 IS NULL) \
AND (CAST(B16 AS DATE) >= '2021-01-01' OR B16 IS NULL) \
AND B10 in ({0}) \
GROUP BY B10 \
) D \
ON A.A3=D.B10 \
LEFT JOIN \
(SELECT B10 \
, COUNT(*) AS current_live_commitments \
FROM PDS_AI.PT_B \
WHERE cast(B2 AS DATE) < '2021-01-01' AND \
(B20 IS NULL OR CAST(B20 AS DATE)>='2021-01-01') AND \
(B16 IS NULL OR CAST(B16 AS DATE)>='2021-01-01') \
AND B10 in ({0}) \
GROUP BY B10 \
) E \
ON A.A3=E.B10".format(sql_account_list))
    # Execute on the datastore (1h timeout) and materialise locally.
    tabular_2022_part2 = Dataset.Tabular.from_sql_query(query_2022_part2, query_timeout=3600)
    levy_model_set_2022_part2 = tabular_2022_part2.to_pandas_dataframe()
    return levy_model_set_2022_part2
| 45.08971
| 139
| 0.651238
| 2,932
| 17,089
| 3.624147
| 0.051501
| 0.032185
| 0.038396
| 0.050819
| 0.923772
| 0.896574
| 0.868812
| 0.823546
| 0.77875
| 0.757105
| 0
| 0.144985
| 0.222248
| 17,089
| 378
| 140
| 45.208995
| 0.654503
| 0.002341
| 0
| 0.75
| 0
| 0.225
| 0.048806
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019444
| false
| 0
| 0.030556
| 0
| 0.069444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cf29208c0394dd4e8388060fe80e7d6d37890206
| 3,068
|
py
|
Python
|
tests/crypto/test_factory.py
|
reputage/didery
|
f94a3cf63a7be2a341fa06d173d068924e540e41
|
[
"Apache-2.0"
] | 8
|
2018-09-07T09:26:52.000Z
|
2021-01-16T12:22:07.000Z
|
tests/crypto/test_factory.py
|
reputage/didery
|
f94a3cf63a7be2a341fa06d173d068924e540e41
|
[
"Apache-2.0"
] | 184
|
2018-04-19T17:46:02.000Z
|
2019-05-21T19:04:30.000Z
|
tests/crypto/test_factory.py
|
reputage/didery
|
f94a3cf63a7be2a341fa06d173d068924e540e41
|
[
"Apache-2.0"
] | 3
|
2018-09-26T19:16:30.000Z
|
2018-12-18T18:50:40.000Z
|
import didery.crypto.eddsa as eddsa
import didery.crypto.ecdsa as ecdsa
import didery.crypto.factory as factory
from collections import OrderedDict as ODict
from didery.help import helping as h
def testECDSAFactory():
    """signatureValidationFactory returns a working ECDSA validator for both
    recognised scheme names ('ECDSA' and 'secp256k1')."""
    # The two halves of the original test were identical except for the
    # scheme name; loop over the names instead of duplicating the body.
    for scheme in ["ECDSA", "secp256k1"]:
        sigs = ODict()
        sigs["name"] = scheme

        validator = factory.signatureValidationFactory(sigs)
        assert validator is not None

        # test that factory returned ECDSA validator
        vk, sk, did, body = ecdsa.genDidHistory(numSigners=2)
        vk = h.bytesToStr64u(vk)

        signature = ecdsa.signResource(body, sk)

        valid = validator(signature, body.decode(), vk)
        assert valid
def testEdDSAFactory():
    """signatureValidationFactory returns a working EdDSA validator for both
    recognised scheme names ('EdDSA' and 'Ed25519')."""
    # The two halves of the original test were identical except for the
    # scheme name; loop over the names instead of duplicating the body.
    for scheme in ["EdDSA", "Ed25519"]:
        sigs = ODict()
        sigs["name"] = scheme

        validator = factory.signatureValidationFactory(sigs)
        assert validator is not None

        # test that factory returned EdDSA validator
        seed = b'\x92[\xcb\xf4\xee5+\xcf\xd4b*%/\xabw8\xd4d\xa2\xf8\xad\xa7U\x19,\xcfS\x12\xa6l\xba"'
        vk, sk, did, body = eddsa.genDidHistory(seed, signer=0, numSigners=2)
        vk = h.bytesToStr64u(vk)

        signature = eddsa.signResource(body, sk)

        valid = validator(signature, body.decode(), vk)
        assert valid
def testInvalidKind():
    """An unrecognised scheme name falls back to the default EdDSA validator."""
    config = ODict()
    config["name"] = "InvalidSuperCrypto"

    validator = factory.signatureValidationFactory(config)
    assert validator is not None

    # test that factory returned default EdDSA validator
    seed = b'\x92[\xcb\xf4\xee5+\xcf\xd4b*%/\xabw8\xd4d\xa2\xf8\xad\xa7U\x19,\xcfS\x12\xa6l\xba"'
    vk, sk, did, body = eddsa.genDidHistory(seed, signer=0, numSigners=2)
    key64 = h.bytesToStr64u(vk)
    sig = eddsa.signResource(body, sk)

    assert validator(sig, body.decode(), key64)
def testEmptyDict():
    """A config with no 'name' key falls back to the default EdDSA validator."""
    validator = factory.signatureValidationFactory(ODict())
    assert validator is not None

    # test that factory returned default EdDSA validator
    seed = b'\x92[\xcb\xf4\xee5+\xcf\xd4b*%/\xabw8\xd4d\xa2\xf8\xad\xa7U\x19,\xcfS\x12\xa6l\xba"'
    vk, sk, did, body = eddsa.genDidHistory(seed, signer=0, numSigners=2)
    key64 = h.bytesToStr64u(vk)
    sig = eddsa.signResource(body, sk)

    assert validator(sig, body.decode(), key64)
| 27.63964
| 97
| 0.693286
| 392
| 3,068
| 5.42602
| 0.170918
| 0.045134
| 0.118477
| 0.12976
| 0.844852
| 0.844852
| 0.844852
| 0.844852
| 0.844852
| 0.844852
| 0
| 0.036502
| 0.187419
| 3,068
| 110
| 98
| 27.890909
| 0.816687
| 0.088983
| 0
| 0.78125
| 0
| 0.0625
| 0.142037
| 0.119082
| 0
| 0
| 0
| 0
| 0.1875
| 1
| 0.0625
| false
| 0
| 0.078125
| 0
| 0.140625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cf8a8aa47a90f40ac457cec20891d94db4a3dbd3
| 177,014
|
py
|
Python
|
boto3_type_annotations_with_docs/boto3_type_annotations/ec2/waiter.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 119
|
2018-12-01T18:20:57.000Z
|
2022-02-02T10:31:29.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/ec2/waiter.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 15
|
2018-11-16T00:16:44.000Z
|
2021-11-13T03:44:18.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/ec2/waiter.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 11
|
2019-05-06T05:26:51.000Z
|
2021-09-28T15:27:59.000Z
|
from typing import Dict
from typing import List
from botocore.waiter import Waiter
class BundleTaskComplete(Waiter):
    # Annotation-only stub: the real polling logic lives in botocore's
    # data-driven Waiter; this class exists to carry the type signature.
    def wait(self, BundleIds: List = None, Filters: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        """Poll ``EC2.Client.describe_bundle_tasks`` every 15 seconds until a
        successful state is reached.  An error is returned after 40 failed
        checks.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeBundleTasks>`_

        :type BundleIds: list
        :param BundleIds: The bundle task IDs.  Default: all your bundle
            tasks.
        :type Filters: list
        :param Filters: List of ``{'Name': str, 'Values': [str]}`` filters.
            Supported names include ``bundle-id``, ``error-code``,
            ``error-message``, ``instance-id``, ``progress``, ``s3-bucket``,
            ``s3-prefix``, ``start-time``, ``state`` (``pending`` |
            ``waiting-for-shutdown`` | ``bundling`` | ``storing`` |
            ``cancelling`` | ``complete`` | ``failed``) and ``update-time``.
        :type DryRun: boolean
        :param DryRun: Checks whether you have the required permissions
            without actually making the request; raises ``DryRunOperation``
            or ``UnauthorizedOperation`` accordingly.
        :type WaiterConfig: dict
        :param WaiterConfig: ``Delay`` -- seconds between attempts (default
            15); ``MaxAttempts`` -- maximum attempts (default 40).
        :returns: None
        """
        pass
class ConversionTaskCancelled(Waiter):
    # Annotation-only stub: the real polling logic lives in botocore's
    # data-driven Waiter; this class exists to carry the type signature.
    def wait(self, ConversionTaskIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        """Poll ``EC2.Client.describe_conversion_tasks`` every 15 seconds
        until a successful state is reached.  An error is returned after 40
        failed checks.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeConversionTasks>`_

        :type ConversionTaskIds: list
        :param ConversionTaskIds: The conversion task IDs.
        :type DryRun: boolean
        :param DryRun: Checks whether you have the required permissions
            without actually making the request; raises ``DryRunOperation``
            or ``UnauthorizedOperation`` accordingly.
        :type WaiterConfig: dict
        :param WaiterConfig: ``Delay`` -- seconds between attempts (default
            15); ``MaxAttempts`` -- maximum attempts (default 40).
        :returns: None
        """
        pass
class ConversionTaskCompleted(Waiter):
    def wait(self, ConversionTaskIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_conversion_tasks` every 15 seconds
        until the conversion tasks complete; an error is raised after 40
        unsuccessful checks.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeConversionTasks>`_.

        :type ConversionTaskIds: list
        :param ConversionTaskIds: The conversion task IDs to watch (each item
            a string).
        :type DryRun: boolean
        :param DryRun: When ``True``, only check that you have the required
            permissions without making the real request: the error response is
            ``DryRunOperation`` if you are authorized, ``UnauthorizedOperation``
            otherwise.
        :type WaiterConfig: dict
        :param WaiterConfig: Overrides for polling behavior: ``Delay``
            (integer seconds between attempts, default 15) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class ConversionTaskDeleted(Waiter):
    def wait(self, ConversionTaskIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_conversion_tasks` every 15 seconds
        until the conversion tasks are deleted; an error is raised after 40
        unsuccessful checks.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeConversionTasks>`_.

        :type ConversionTaskIds: list
        :param ConversionTaskIds: The conversion task IDs to watch (each item
            a string).
        :type DryRun: boolean
        :param DryRun: When ``True``, only check that you have the required
            permissions without making the real request: the error response is
            ``DryRunOperation`` if you are authorized, ``UnauthorizedOperation``
            otherwise.
        :type WaiterConfig: dict
        :param WaiterConfig: Overrides for polling behavior: ``Delay``
            (integer seconds between attempts, default 15) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class CustomerGatewayAvailable(Waiter):
    def wait(self, CustomerGatewayIds: List = None, Filters: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_customer_gateways` every 15 seconds
        until the customer gateways are available; an error is raised after 40
        unsuccessful checks.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeCustomerGateways>`_.

        :type CustomerGatewayIds: list
        :param CustomerGatewayIds: One or more customer gateway IDs (strings).
            Default: describes all your customer gateways.
        :type Filters: list
        :param Filters: One or more filters, each a dict with a ``Name``
            (string) and ``Values`` (list of strings); both are
            case-sensitive. Supported filter names:

            * ``bgp-asn`` - The customer gateway's Border Gateway Protocol
              (BGP) Autonomous System Number (ASN).
            * ``customer-gateway-id`` - The ID of the customer gateway.
            * ``ip-address`` - The IP address of the customer gateway's
              Internet-routable external interface.
            * ``state`` - The state of the customer gateway (``pending`` |
              ``available`` | ``deleting`` | ``deleted`` ).
            * ``type`` - The type of customer gateway. Currently, the only
              supported type is ``ipsec.1`` .
            * ``tag`` :<key> - The key/value combination of a tag assigned to
              the resource. Use the tag key in the filter name and the tag
              value as the filter value (for example, ``tag:Owner`` with value
              ``TeamA`` ).
            * ``tag-key`` - The key of a tag assigned to the resource,
              regardless of the tag value.
        :type DryRun: boolean
        :param DryRun: When ``True``, only check that you have the required
            permissions without making the real request: the error response is
            ``DryRunOperation`` if you are authorized, ``UnauthorizedOperation``
            otherwise.
        :type WaiterConfig: dict
        :param WaiterConfig: Overrides for polling behavior: ``Delay``
            (integer seconds between attempts, default 15) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class ExportTaskCancelled(Waiter):
    def wait(self, ExportTaskIds: List = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_export_tasks` every 15 seconds
        until the export tasks are cancelled; an error is raised after 40
        unsuccessful checks.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeExportTasks>`_.

        :type ExportTaskIds: list
        :param ExportTaskIds: The export task IDs to watch (each item a
            string).
        :type WaiterConfig: dict
        :param WaiterConfig: Overrides for polling behavior: ``Delay``
            (integer seconds between attempts, default 15) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class ExportTaskCompleted(Waiter):
    def wait(self, ExportTaskIds: List = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_export_tasks` every 15 seconds
        until the export tasks complete; an error is raised after 40
        unsuccessful checks.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeExportTasks>`_.

        :type ExportTaskIds: list
        :param ExportTaskIds: The export task IDs to watch (each item a
            string).
        :type WaiterConfig: dict
        :param WaiterConfig: Overrides for polling behavior: ``Delay``
            (integer seconds between attempts, default 15) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class ImageAvailable(Waiter):
    def wait(self, ExecutableUsers: List = None, Filters: List = None, ImageIds: List = None, Owners: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_images` every 15 seconds until the
        images are available; an error is raised after 40 unsuccessful checks.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeImages>`_.

        :type ExecutableUsers: list
        :param ExecutableUsers: Scopes the images by users with explicit
            launch permissions. Specify an AWS account ID, ``self`` (the
            sender of the request), or ``all`` (public AMIs). Each item is a
            string.
        :type Filters: list
        :param Filters: The filters, each a dict with a ``Name`` (string) and
            ``Values`` (list of strings); both are case-sensitive. Supported
            filter names:

            * ``architecture`` - The image architecture (``i386`` |
              ``x86_64`` ).
            * ``block-device-mapping.delete-on-termination`` - A Boolean value
              that indicates whether the Amazon EBS volume is deleted on
              instance termination.
            * ``block-device-mapping.device-name`` - The device name specified
              in the block device mapping (for example, ``/dev/sdh`` or
              ``xvdh`` ).
            * ``block-device-mapping.snapshot-id`` - The ID of the snapshot
              used for the EBS volume.
            * ``block-device-mapping.volume-size`` - The volume size of the
              EBS volume, in GiB.
            * ``block-device-mapping.volume-type`` - The volume type of the
              EBS volume (``gp2`` | ``io1`` | ``st1`` | ``sc1`` |
              ``standard`` ).
            * ``block-device-mapping.encrypted`` - A Boolean that indicates
              whether the EBS volume is encrypted.
            * ``description`` - The description of the image (provided during
              image creation).
            * ``ena-support`` - A Boolean that indicates whether enhanced
              networking with ENA is enabled.
            * ``hypervisor`` - The hypervisor type (``ovm`` | ``xen`` ).
            * ``image-id`` - The ID of the image.
            * ``image-type`` - The image type (``machine`` | ``kernel`` |
              ``ramdisk`` ).
            * ``is-public`` - A Boolean that indicates whether the image is
              public.
            * ``kernel-id`` - The kernel ID.
            * ``manifest-location`` - The location of the image manifest.
            * ``name`` - The name of the AMI (provided during image creation).
            * ``owner-alias`` - String value from an Amazon-maintained list
              (``amazon`` | ``aws-marketplace`` | ``microsoft`` ) of snapshot
              owners. Not to be confused with the user-configured AWS account
              alias, which is set from the IAM console.
            * ``owner-id`` - The AWS account ID of the image owner.
            * ``platform`` - The platform. To only list Windows-based AMIs,
              use ``windows`` .
            * ``product-code`` - The product code.
            * ``product-code.type`` - The type of the product code
              (``devpay`` | ``marketplace`` ).
            * ``ramdisk-id`` - The RAM disk ID.
            * ``root-device-name`` - The device name of the root device volume
              (for example, ``/dev/sda1`` ).
            * ``root-device-type`` - The type of the root device volume
              (``ebs`` | ``instance-store`` ).
            * ``state`` - The state of the image (``available`` | ``pending``
              | ``failed`` ).
            * ``state-reason-code`` - The reason code for the state change.
            * ``state-reason-message`` - The message for the state change.
            * ``sriov-net-support`` - A value of ``simple`` indicates that
              enhanced networking with the Intel 82599 VF interface is
              enabled.
            * ``tag`` :<key> - The key/value combination of a tag assigned to
              the resource (for example, ``tag:Owner`` with value ``TeamA`` ).
            * ``tag-key`` - The key of a tag assigned to the resource,
              regardless of the tag value.
            * ``virtualization-type`` - The virtualization type
              (``paravirtual`` | ``hvm`` ).
        :type ImageIds: list
        :param ImageIds: The image IDs (strings). Default: describes all
            images available to you.
        :type Owners: list
        :param Owners: Filters the images by the owner. Specify an AWS account
            ID, ``self`` (owner is the sender of the request), or an AWS owner
            alias (valid values are ``amazon`` | ``aws-marketplace`` |
            ``microsoft`` ). Omitting this option returns all images for which
            you have launch permissions, regardless of ownership.
        :type DryRun: boolean
        :param DryRun: When ``True``, only check that you have the required
            permissions without making the real request: the error response is
            ``DryRunOperation`` if you are authorized, ``UnauthorizedOperation``
            otherwise.
        :type WaiterConfig: dict
        :param WaiterConfig: Overrides for polling behavior: ``Delay``
            (integer seconds between attempts, default 15) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class ImageExists(Waiter):
    def wait(self, ExecutableUsers: List = None, Filters: List = None, ImageIds: List = None, Owners: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_images` every 15 seconds until the
        images exist; an error is raised after 40 unsuccessful checks.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeImages>`_.

        :type ExecutableUsers: list
        :param ExecutableUsers: Scopes the images by users with explicit
            launch permissions. Specify an AWS account ID, ``self`` (the
            sender of the request), or ``all`` (public AMIs). Each item is a
            string.
        :type Filters: list
        :param Filters: The filters, each a dict with a ``Name`` (string) and
            ``Values`` (list of strings); both are case-sensitive. Supported
            filter names:

            * ``architecture`` - The image architecture (``i386`` |
              ``x86_64`` ).
            * ``block-device-mapping.delete-on-termination`` - A Boolean value
              that indicates whether the Amazon EBS volume is deleted on
              instance termination.
            * ``block-device-mapping.device-name`` - The device name specified
              in the block device mapping (for example, ``/dev/sdh`` or
              ``xvdh`` ).
            * ``block-device-mapping.snapshot-id`` - The ID of the snapshot
              used for the EBS volume.
            * ``block-device-mapping.volume-size`` - The volume size of the
              EBS volume, in GiB.
            * ``block-device-mapping.volume-type`` - The volume type of the
              EBS volume (``gp2`` | ``io1`` | ``st1`` | ``sc1`` |
              ``standard`` ).
            * ``block-device-mapping.encrypted`` - A Boolean that indicates
              whether the EBS volume is encrypted.
            * ``description`` - The description of the image (provided during
              image creation).
            * ``ena-support`` - A Boolean that indicates whether enhanced
              networking with ENA is enabled.
            * ``hypervisor`` - The hypervisor type (``ovm`` | ``xen`` ).
            * ``image-id`` - The ID of the image.
            * ``image-type`` - The image type (``machine`` | ``kernel`` |
              ``ramdisk`` ).
            * ``is-public`` - A Boolean that indicates whether the image is
              public.
            * ``kernel-id`` - The kernel ID.
            * ``manifest-location`` - The location of the image manifest.
            * ``name`` - The name of the AMI (provided during image creation).
            * ``owner-alias`` - String value from an Amazon-maintained list
              (``amazon`` | ``aws-marketplace`` | ``microsoft`` ) of snapshot
              owners. Not to be confused with the user-configured AWS account
              alias, which is set from the IAM console.
            * ``owner-id`` - The AWS account ID of the image owner.
            * ``platform`` - The platform. To only list Windows-based AMIs,
              use ``windows`` .
            * ``product-code`` - The product code.
            * ``product-code.type`` - The type of the product code
              (``devpay`` | ``marketplace`` ).
            * ``ramdisk-id`` - The RAM disk ID.
            * ``root-device-name`` - The device name of the root device volume
              (for example, ``/dev/sda1`` ).
            * ``root-device-type`` - The type of the root device volume
              (``ebs`` | ``instance-store`` ).
            * ``state`` - The state of the image (``available`` | ``pending``
              | ``failed`` ).
            * ``state-reason-code`` - The reason code for the state change.
            * ``state-reason-message`` - The message for the state change.
            * ``sriov-net-support`` - A value of ``simple`` indicates that
              enhanced networking with the Intel 82599 VF interface is
              enabled.
            * ``tag`` :<key> - The key/value combination of a tag assigned to
              the resource (for example, ``tag:Owner`` with value ``TeamA`` ).
            * ``tag-key`` - The key of a tag assigned to the resource,
              regardless of the tag value.
            * ``virtualization-type`` - The virtualization type
              (``paravirtual`` | ``hvm`` ).
        :type ImageIds: list
        :param ImageIds: The image IDs (strings). Default: describes all
            images available to you.
        :type Owners: list
        :param Owners: Filters the images by the owner. Specify an AWS account
            ID, ``self`` (owner is the sender of the request), or an AWS owner
            alias (valid values are ``amazon`` | ``aws-marketplace`` |
            ``microsoft`` ). Omitting this option returns all images for which
            you have launch permissions, regardless of ownership.
        :type DryRun: boolean
        :param DryRun: When ``True``, only check that you have the required
            permissions without making the real request: the error response is
            ``DryRunOperation`` if you are authorized, ``UnauthorizedOperation``
            otherwise.
        :type WaiterConfig: dict
        :param WaiterConfig: Overrides for polling behavior: ``Delay``
            (integer seconds between attempts, default 15) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class InstanceExists(Waiter):
    def wait(self, Filters: List = None, InstanceIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_instances` every 5 seconds until
        the instances exist; an error is raised after 40 unsuccessful checks.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeInstances>`_.

        :type Filters: list
        :param Filters: The filters, each a dict with a ``Name`` (string) and
            ``Values`` (list of strings); both are case-sensitive. Supported
            filter names:

            * ``affinity`` - The affinity setting for an instance running on a
              Dedicated Host (``default`` | ``host`` ).
            * ``architecture`` - The instance architecture (``i386`` |
              ``x86_64`` ).
            * ``availability-zone`` - The Availability Zone of the instance.
            * ``block-device-mapping.attach-time`` - The attach time for an
              EBS volume mapped to the instance, for example,
              ``2010-09-15T17:15:20.000Z`` .
            * ``block-device-mapping.delete-on-termination`` - A Boolean that
              indicates whether the EBS volume is deleted on instance
              termination.
            * ``block-device-mapping.device-name`` - The device name specified
              in the block device mapping (for example, ``/dev/sdh`` or
              ``xvdh`` ).
            * ``block-device-mapping.status`` - The status for the EBS volume
              (``attaching`` | ``attached`` | ``detaching`` | ``detached`` ).
            * ``block-device-mapping.volume-id`` - The volume ID of the EBS
              volume.
            * ``client-token`` - The idempotency token you provided when you
              launched the instance.
            * ``dns-name`` - The public DNS name of the instance.
            * ``group-id`` - The ID of the security group for the instance.
              EC2-Classic only.
            * ``group-name`` - The name of the security group for the
              instance. EC2-Classic only.
            * ``hibernation-options.configured`` - A Boolean that indicates
              whether the instance is enabled for hibernation. A value of
              ``true`` means that the instance is enabled for hibernation.
            * ``host-id`` - The ID of the Dedicated Host on which the instance
              is running, if applicable.
            * ``hypervisor`` - The hypervisor type of the instance (``ovm`` |
              ``xen`` ).
            * ``iam-instance-profile.arn`` - The instance profile associated
              with the instance. Specified as an ARN.
            * ``image-id`` - The ID of the image used to launch the instance.
            * ``instance-id`` - The ID of the instance.
            * ``instance-lifecycle`` - Indicates whether this is a Spot
              Instance or a Scheduled Instance (``spot`` | ``scheduled`` ).
            * ``instance-state-code`` - The state of the instance, as a 16-bit
              unsigned integer. The high byte is used for internal purposes
              and should be ignored. The low byte is set based on the state
              represented. The valid values are: 0 (pending), 16 (running),
              32 (shutting-down), 48 (terminated), 64 (stopping), and 80
              (stopped).
            * ``instance-state-name`` - The state of the instance
              (``pending`` | ``running`` | ``shutting-down`` | ``terminated``
              | ``stopping`` | ``stopped`` ).
            * ``instance-type`` - The type of instance (for example,
              ``t2.micro`` ).
            * ``instance.group-id`` - The ID of the security group for the
              instance.
            * ``instance.group-name`` - The name of the security group for the
              instance.
            * ``ip-address`` - The public IPv4 address of the instance.
            * ``kernel-id`` - The kernel ID.
            * ``key-name`` - The name of the key pair used when the instance
              was launched.
            * ``launch-index`` - When launching multiple instances, this is
              the index for the instance in the launch group (for example, 0,
              1, 2, and so on).
            * ``launch-time`` - The time when the instance was launched.
            * ``monitoring-state`` - Indicates whether detailed monitoring is
              enabled (``disabled`` | ``enabled`` ).
            * ``network-interface.addresses.private-ip-address`` - The private
              IPv4 address associated with the network interface.
            * ``network-interface.addresses.primary`` - Specifies whether the
              IPv4 address of the network interface is the primary private
              IPv4 address.
            * ``network-interface.addresses.association.public-ip`` - The ID
              of the association of an Elastic IP address (IPv4) with a
              network interface.
            * ``network-interface.addresses.association.ip-owner-id`` - The
              owner ID of the private IPv4 address associated with the network
              interface.
            * ``network-interface.association.public-ip`` - The address of the
              Elastic IP address (IPv4) bound to the network interface.
            * ``network-interface.association.ip-owner-id`` - The owner of the
              Elastic IP address (IPv4) associated with the network interface.
            * ``network-interface.association.allocation-id`` - The allocation
              ID returned when you allocated the Elastic IP address (IPv4) for
              your network interface.
            * ``network-interface.association.association-id`` - The
              association ID returned when the network interface was
              associated with an IPv4 address.
            * ``network-interface.attachment.attachment-id`` - The ID of the
              interface attachment.
            * ``network-interface.attachment.instance-id`` - The ID of the
              instance to which the network interface is attached.
            * ``network-interface.attachment.instance-owner-id`` - The owner
              ID of the instance to which the network interface is attached.
            * ``network-interface.attachment.device-index`` - The device index
              to which the network interface is attached.
            * ``network-interface.attachment.status`` - The status of the
              attachment (``attaching`` | ``attached`` | ``detaching`` |
              ``detached`` ).
            * ``network-interface.attachment.attach-time`` - The time that the
              network interface was attached to an instance.
            * ``network-interface.attachment.delete-on-termination`` -
              Specifies whether the attachment is deleted when an instance is
              terminated.
            * ``network-interface.availability-zone`` - The Availability Zone
              for the network interface.
            * ``network-interface.description`` - The description of the
              network interface.
            * ``network-interface.group-id`` - The ID of a security group
              associated with the network interface.
            * ``network-interface.group-name`` - The name of a security group
              associated with the network interface.
            * ``network-interface.ipv6-addresses.ipv6-address`` - The IPv6
              address associated with the network interface.
            * ``network-interface.mac-address`` - The MAC address of the
              network interface.
            * ``network-interface.network-interface-id`` - The ID of the
              network interface.
            * ``network-interface.owner-id`` - The ID of the owner of the
              network interface.
            * ``network-interface.private-dns-name`` - The private DNS name of
              the network interface.
            * ``network-interface.requester-id`` - The requester ID for the
              network interface.
            * ``network-interface.requester-managed`` - Indicates whether the
              network interface is being managed by AWS.
            * ``network-interface.status`` - The status of the network
              interface (``available`` ) | ``in-use`` ).
            * ``network-interface.source-dest-check`` - Whether the network
              interface performs source/destination checking. A value of
              ``true`` means that checking is enabled, and ``false`` means
              that checking is disabled. The value must be ``false`` for the
              network interface to perform network address translation (NAT)
              in your VPC.
            * ``network-interface.subnet-id`` - The ID of the subnet for the
              network interface.
            * ``network-interface.vpc-id`` - The ID of the VPC for the network
              interface.
            * ``owner-id`` - The AWS account ID of the instance owner.
            * ``placement-group-name`` - The name of the placement group for
              the instance.
            * ``placement-partition-number`` - The partition in which the
              instance is located.
            * ``platform`` - The platform. To list only Windows instances, use
              ``windows`` .
            * ``private-dns-name`` - The private IPv4 DNS name of the
              instance.
            * ``private-ip-address`` - The private IPv4 address of the
              instance.
            * ``product-code`` - The product code associated with the AMI used
              to launch the instance.
            * ``product-code.type`` - The type of product code (``devpay`` |
              ``marketplace`` ).
            * ``ramdisk-id`` - The RAM disk ID.
            * ``reason`` - The reason for the current state of the instance
              (for example, shows \"User Initiated [date]\" when you stop or
              terminate the instance). Similar to the state-reason-code
              filter.
            * ``requester-id`` - The ID of the entity that launched the
              instance on your behalf (for example, AWS Management Console,
              Auto Scaling, and so on).
            * ``reservation-id`` - The ID of the instance\'s reservation. A
              reservation ID is created any time you launch an instance. A
              reservation ID has a one-to-one relationship with an instance
              launch request, but can be associated with more than one
              instance if you launch multiple instances using the same launch
              request. For example, if you launch one instance, you get one
              reservation ID. If you launch ten instances using the same
              launch request, you also get one reservation ID.
            * ``root-device-name`` - The device name of the root device volume
              (for example, ``/dev/sda1`` ).
            * ``root-device-type`` - The type of the root device volume
              (``ebs`` | ``instance-store`` ).
            * ``source-dest-check`` - Indicates whether the instance performs
              source/destination checking. A value of ``true`` means that
              checking is enabled, and ``false`` means that checking is
              disabled. The value must be ``false`` for the instance to
              perform network address translation (NAT) in your VPC.
            * ``spot-instance-request-id`` - The ID of the Spot Instance
              request.
            * ``state-reason-code`` - The reason code for the state change.
            * ``state-reason-message`` - A message that describes the state
              change.
            * ``subnet-id`` - The ID of the subnet for the instance.
            * ``tag`` :<key> - The key/value combination of a tag assigned to
              the resource (for example, ``tag:Owner`` with value ``TeamA`` ).
            * ``tag-key`` - The key of a tag assigned to the resource,
              regardless of the tag value.
            * ``tenancy`` - The tenancy of an instance (``dedicated`` |
              ``default`` | ``host`` ).
            * ``virtualization-type`` - The virtualization type of the
              instance (``paravirtual`` | ``hvm`` ).
            * ``vpc-id`` - The ID of the VPC that the instance is running in.
        :type InstanceIds: list
        :param InstanceIds: The instance IDs (strings). Default: describes all
            your instances.
        :type DryRun: boolean
        :param DryRun: When ``True``, only check that you have the required
            permissions without making the real request: the error response is
            ``DryRunOperation`` if you are authorized, ``UnauthorizedOperation``
            otherwise.
        :type MaxResults: integer
        :param MaxResults: The maximum number of results to return in a single
            call. To retrieve the remaining results, make another call with
            the returned ``NextToken`` value. This value can be between 5 and
            1000. You cannot specify this parameter and the instance IDs
            parameter in the same call.
        :type NextToken: string
        :param NextToken: The token to request the next page of results.
        :type WaiterConfig: dict
        :param WaiterConfig: Overrides for polling behavior: ``Delay``
            (integer seconds between attempts, default 5) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class InstanceRunning(Waiter):
    """Waiter stub that blocks until the described EC2 instances are running."""

    def wait(self, Filters: List = None, InstanceIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_instances` every 15 seconds until a
        successful state is reached. An error is returned after 40 failed checks.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeInstances>`_

        **Request Syntax**
        ::
          waiter.wait(
              Filters=[{'Name': 'string', 'Values': ['string']}],
              InstanceIds=['string'],
              DryRun=True|False,
              MaxResults=123,
              NextToken='string',
              WaiterConfig={'Delay': 123, 'MaxAttempts': 123}
          )

        :type Filters: list
        :param Filters:
            One or more filters, each a dict with a case-sensitive ``Name``
            *(string)* and ``Values`` *(list of strings)*. Supported filter
            names include ``affinity``, ``architecture``, ``availability-zone``,
            ``block-device-mapping.*``, ``client-token``, ``dns-name``,
            ``group-id``, ``group-name``, ``hibernation-options.configured``,
            ``host-id``, ``hypervisor``, ``iam-instance-profile.arn``,
            ``image-id``, ``instance-id``, ``instance-lifecycle``,
            ``instance-state-code``, ``instance-state-name``, ``instance-type``,
            ``instance.group-id``, ``instance.group-name``, ``ip-address``,
            ``kernel-id``, ``key-name``, ``launch-index``, ``launch-time``,
            ``monitoring-state``, ``network-interface.*``, ``owner-id``,
            ``placement-group-name``, ``placement-partition-number``,
            ``platform``, ``private-dns-name``, ``private-ip-address``,
            ``product-code``, ``product-code.type``, ``ramdisk-id``,
            ``reason``, ``requester-id``, ``reservation-id``,
            ``root-device-name``, ``root-device-type``, ``source-dest-check``,
            ``spot-instance-request-id``, ``state-reason-code``,
            ``state-reason-message``, ``subnet-id``, ``tag:<key>``,
            ``tag-key``, ``tenancy``, ``virtualization-type`` and ``vpc-id``.
            See the DescribeInstances API reference for the meaning of each.
        :type InstanceIds: list
        :param InstanceIds:
            The instance IDs (list of strings).
            Default: Describes all your instances.
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request, and provides an error
            response. If you have the required permissions, the error response
            is ``DryRunOperation``. Otherwise, it is ``UnauthorizedOperation``.
        :type MaxResults: integer
        :param MaxResults:
            The maximum number of results to return in a single call (between
            5 and 1000). To retrieve the remaining results, make another call
            with the returned ``NextToken`` value. You cannot specify this
            parameter together with ``InstanceIds`` in the same call.
        :type NextToken: string
        :param NextToken:
            The token to request the next page of results.
        :type WaiterConfig: dict
        :param WaiterConfig:
            A dictionary that provides parameters to control waiting behavior.
            - ``Delay`` *(integer)* -- seconds to wait between attempts.
              Default: 15
            - ``MaxAttempts`` *(integer)* -- maximum number of attempts.
              Default: 40
        :returns: None
        """
        pass
class InstanceStatusOk(Waiter):
    """Waiter stub that blocks until the EC2 instance status checks pass."""

    def wait(self, Filters: List = None, InstanceIds: List = None, MaxResults: int = None, NextToken: str = None, DryRun: bool = None, IncludeAllInstances: bool = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_instance_status` every 15 seconds
        until a successful state is reached. An error is returned after 40
        failed checks.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeInstanceStatus>`_

        **Request Syntax**
        ::
          waiter.wait(
              Filters=[{'Name': 'string', 'Values': ['string']}],
              InstanceIds=['string'],
              MaxResults=123,
              NextToken='string',
              DryRun=True|False,
              IncludeAllInstances=True|False,
              WaiterConfig={'Delay': 123, 'MaxAttempts': 123}
          )

        :type Filters: list
        :param Filters:
            One or more filters, each a dict with a case-sensitive ``Name``
            *(string)* and ``Values`` *(list of strings)*. Supported filter
            names:
            - ``availability-zone`` - The Availability Zone of the instance.
            - ``event.code`` - The code for the scheduled event
              (``instance-reboot`` | ``system-reboot`` | ``system-maintenance``
              | ``instance-retirement`` | ``instance-stop``).
            - ``event.description`` - A description of the event.
            - ``event.instance-event-id`` - The ID of the event whose date and
              time you are modifying.
            - ``event.not-after`` / ``event.not-before`` /
              ``event.not-before-deadline`` - Scheduled-event timestamps
              (for example, ``2014-09-15T17:15:20.000Z``).
            - ``instance-state-code`` - The instance state as a 16-bit unsigned
              integer; only the low byte is meaningful: 0 (pending),
              16 (running), 32 (shutting-down), 48 (terminated),
              64 (stopping), 80 (stopped).
            - ``instance-state-name`` - ``pending`` | ``running`` |
              ``shutting-down`` | ``terminated`` | ``stopping`` | ``stopped``.
            - ``instance-status.reachability`` / ``system-status.reachability``
              - ``passed`` | ``failed`` | ``initializing`` |
              ``insufficient-data``.
            - ``instance-status.status`` / ``system-status.status`` - ``ok`` |
              ``impaired`` | ``initializing`` | ``insufficient-data`` |
              ``not-applicable``.
        :type InstanceIds: list
        :param InstanceIds:
            The instance IDs (list of strings).
            Default: Describes all your instances.
            Constraints: Maximum 100 explicitly specified instance IDs.
        :type MaxResults: integer
        :param MaxResults:
            The maximum number of results to return in a single call (between
            5 and 1000). To retrieve the remaining results, make another call
            with the returned ``NextToken`` value. You cannot specify this
            parameter together with ``InstanceIds`` in the same call.
        :type NextToken: string
        :param NextToken:
            The token to retrieve the next page of results.
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request, and provides an error
            response. If you have the required permissions, the error response
            is ``DryRunOperation``. Otherwise, it is ``UnauthorizedOperation``.
        :type IncludeAllInstances: boolean
        :param IncludeAllInstances:
            When ``true``, includes the health status for all instances. When
            ``false``, includes the health status for running instances only.
            Default: ``false``
        :type WaiterConfig: dict
        :param WaiterConfig:
            A dictionary that provides parameters to control waiting behavior.
            - ``Delay`` *(integer)* -- seconds to wait between attempts.
              Default: 15
            - ``MaxAttempts`` *(integer)* -- maximum number of attempts.
              Default: 40
        :returns: None
        """
        pass
class InstanceStopped(Waiter):
    """Waiter stub that blocks until the described EC2 instances are stopped."""

    def wait(self, Filters: List = None, InstanceIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_instances` every 15 seconds until a
        successful state is reached. An error is returned after 40 failed checks.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeInstances>`_

        **Request Syntax**
        ::
          waiter.wait(
              Filters=[{'Name': 'string', 'Values': ['string']}],
              InstanceIds=['string'],
              DryRun=True|False,
              MaxResults=123,
              NextToken='string',
              WaiterConfig={'Delay': 123, 'MaxAttempts': 123}
          )

        :type Filters: list
        :param Filters:
            One or more filters, each a dict with a case-sensitive ``Name``
            *(string)* and ``Values`` *(list of strings)*. Supported filter
            names include ``affinity``, ``architecture``, ``availability-zone``,
            ``block-device-mapping.*``, ``client-token``, ``dns-name``,
            ``group-id``, ``group-name``, ``hibernation-options.configured``,
            ``host-id``, ``hypervisor``, ``iam-instance-profile.arn``,
            ``image-id``, ``instance-id``, ``instance-lifecycle``,
            ``instance-state-code``, ``instance-state-name``, ``instance-type``,
            ``instance.group-id``, ``instance.group-name``, ``ip-address``,
            ``kernel-id``, ``key-name``, ``launch-index``, ``launch-time``,
            ``monitoring-state``, ``network-interface.*``, ``owner-id``,
            ``placement-group-name``, ``placement-partition-number``,
            ``platform``, ``private-dns-name``, ``private-ip-address``,
            ``product-code``, ``product-code.type``, ``ramdisk-id``,
            ``reason``, ``requester-id``, ``reservation-id``,
            ``root-device-name``, ``root-device-type``, ``source-dest-check``,
            ``spot-instance-request-id``, ``state-reason-code``,
            ``state-reason-message``, ``subnet-id``, ``tag:<key>``,
            ``tag-key``, ``tenancy``, ``virtualization-type`` and ``vpc-id``.
            See the DescribeInstances API reference for the meaning of each.
        :type InstanceIds: list
        :param InstanceIds:
            The instance IDs (list of strings).
            Default: Describes all your instances.
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request, and provides an error
            response. If you have the required permissions, the error response
            is ``DryRunOperation``. Otherwise, it is ``UnauthorizedOperation``.
        :type MaxResults: integer
        :param MaxResults:
            The maximum number of results to return in a single call (between
            5 and 1000). To retrieve the remaining results, make another call
            with the returned ``NextToken`` value. You cannot specify this
            parameter together with ``InstanceIds`` in the same call.
        :type NextToken: string
        :param NextToken:
            The token to request the next page of results.
        :type WaiterConfig: dict
        :param WaiterConfig:
            A dictionary that provides parameters to control waiting behavior.
            - ``Delay`` *(integer)* -- seconds to wait between attempts.
              Default: 15
            - ``MaxAttempts`` *(integer)* -- maximum number of attempts.
              Default: 40
        :returns: None
        """
        pass
class InstanceTerminated(Waiter):
    def wait(self, Filters: List = None, InstanceIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_instances` until the specified
        instances are terminated.

        Polls every 15 seconds until a successful state is reached; an error
        is returned after 40 failed checks.  Both values can be overridden
        through ``WaiterConfig``.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeInstances>`_

        :type Filters: list
        :param Filters:
            The filters.  Each filter is a dict with a ``Name`` *(string)*
            and ``Values`` *(list of strings)*; filter names and values are
            case-sensitive.  Commonly used filters include ``instance-id``,
            ``instance-state-name`` (``pending`` | ``running`` |
            ``shutting-down`` | ``terminated`` | ``stopping`` | ``stopped``),
            ``instance-state-code``, ``instance-type``,
            ``availability-zone``, ``image-id``, ``vpc-id``, ``subnet-id``,
            ``private-ip-address``, ``ip-address``, ``key-name``,
            ``owner-id``, ``network-interface.status`` (``available`` |
            ``in-use``), ``tag``:<key>, and ``tag-key``.  See
            ``EC2.Client.describe_instances`` for the complete list of
            supported filters.
        :type InstanceIds: list
        :param InstanceIds:
            The instance IDs.
            Default: Describes all your instances.
            - *(string) --*
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request, and provides an error
            response.  If you have the required permissions, the error
            response is ``DryRunOperation``.  Otherwise, it is
            ``UnauthorizedOperation``.
        :type MaxResults: integer
        :param MaxResults:
            The maximum number of results to return in a single call (between
            5 and 1000).  To retrieve the remaining results, make another
            call with the returned ``NextToken`` value.  You cannot specify
            this parameter and ``InstanceIds`` in the same call.
        :type NextToken: string
        :param NextToken:
            The token to request the next page of results.
        :type WaiterConfig: dict
        :param WaiterConfig:
            A dictionary that provides parameters to control waiting behavior.
            - **Delay** *(integer) --* The amount of time in seconds to wait
              between attempts.  Default: 15
            - **MaxAttempts** *(integer) --* The maximum number of attempts
              to be made.  Default: 40
        :returns: None
        """
        # Stub: the real implementation is generated by botocore at runtime.
        pass
class KeyPairExists(Waiter):
    def wait(self, Filters: List = None, KeyNames: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_key_pairs` until the specified
        key pair(s) exist.

        Polls every 5 seconds until a successful state is reached; an error
        is returned after 6 failed checks.  Both values can be overridden
        through ``WaiterConfig``.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeKeyPairs>`_

        :type Filters: list
        :param Filters:
            The filters.  Each filter is a dict with a ``Name`` *(string)*
            and ``Values`` *(list of strings)*; filter names and values are
            case-sensitive.  Supported filters:
            * ``fingerprint`` - The fingerprint of the key pair.
            * ``key-name`` - The name of the key pair.
        :type KeyNames: list
        :param KeyNames:
            The key pair names.
            Default: Describes all your key pairs.
            - *(string) --*
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request, and provides an error
            response.  If you have the required permissions, the error
            response is ``DryRunOperation``.  Otherwise, it is
            ``UnauthorizedOperation``.
        :type WaiterConfig: dict
        :param WaiterConfig:
            A dictionary that provides parameters to control waiting behavior.
            - **Delay** *(integer) --* The amount of time in seconds to wait
              between attempts.  Default: 5
            - **MaxAttempts** *(integer) --* The maximum number of attempts
              to be made.  Default: 6
        :returns: None
        """
        # Stub: the real implementation is generated by botocore at runtime.
        pass
class NatGatewayAvailable(Waiter):
    def wait(self, Filters: List = None, MaxResults: int = None, NatGatewayIds: List = None, NextToken: str = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_nat_gateways` until the specified
        NAT gateway(s) are available.

        Polls every 15 seconds until a successful state is reached; an error
        is returned after 40 failed checks.  Both values can be overridden
        through ``WaiterConfig``.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeNatGateways>`_

        :type Filters: list
        :param Filters:
            One or more filters.  Each filter is a dict with a ``Name``
            *(string)* and ``Values`` *(list of strings)*; filter names and
            values are case-sensitive.  Supported filters:
            * ``nat-gateway-id`` - The ID of the NAT gateway.
            * ``state`` - The state of the NAT gateway (``pending`` |
              ``failed`` | ``available`` | ``deleting`` | ``deleted``).
            * ``subnet-id`` - The ID of the subnet in which the NAT gateway
              resides.
            * ``tag``:<key> - The key/value combination of a tag assigned to
              the resource (for example, ``tag:Owner`` with value ``TeamA``).
            * ``tag-key`` - The key of a tag assigned to the resource,
              regardless of the tag value.
            * ``vpc-id`` - The ID of the VPC in which the NAT gateway
              resides.
        :type MaxResults: integer
        :param MaxResults:
            The maximum number of results to return with a single call.  To
            retrieve the remaining results, make another call with the
            returned ``nextToken`` value.
        :type NatGatewayIds: list
        :param NatGatewayIds:
            One or more NAT gateway IDs.
            - *(string) --*
        :type NextToken: string
        :param NextToken:
            The token for the next page of results.
        :type WaiterConfig: dict
        :param WaiterConfig:
            A dictionary that provides parameters to control waiting behavior.
            - **Delay** *(integer) --* The amount of time in seconds to wait
              between attempts.  Default: 15
            - **MaxAttempts** *(integer) --* The maximum number of attempts
              to be made.  Default: 40
        :returns: None
        """
        # Stub: the real implementation is generated by botocore at runtime.
        pass
class NetworkInterfaceAvailable(Waiter):
    def wait(self, Filters: List = None, DryRun: bool = None, NetworkInterfaceIds: List = None, NextToken: str = None, MaxResults: int = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_network_interfaces` until the
        specified network interface(s) are available.

        Polls every 20 seconds until a successful state is reached; an error
        is returned after 10 failed checks.  Both values can be overridden
        through ``WaiterConfig``.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeNetworkInterfaces>`_

        :type Filters: list
        :param Filters:
            One or more filters.  Each filter is a dict with a ``Name``
            *(string)* and ``Values`` *(list of strings)*; filter names and
            values are case-sensitive.  Commonly used filters include
            ``network-interface-id``, ``status`` (``available`` when not
            attached to an instance, ``in-use`` when attached),
            ``attachment.instance-id``, ``attachment.status`` (``attaching``
            | ``attached`` | ``detaching`` | ``detached``),
            ``availability-zone``, ``group-id``, ``mac-address``,
            ``owner-id``, ``private-ip-address``, ``subnet-id``, ``vpc-id``,
            ``tag``:<key>, and ``tag-key``.  See
            ``EC2.Client.describe_network_interfaces`` for the complete list
            of supported filters.
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request, and provides an error
            response.  If you have the required permissions, the error
            response is ``DryRunOperation``.  Otherwise, it is
            ``UnauthorizedOperation``.
        :type NetworkInterfaceIds: list
        :param NetworkInterfaceIds:
            One or more network interface IDs.
            Default: Describes all your network interfaces.
            - *(string) --*
        :type NextToken: string
        :param NextToken:
            The token to retrieve the next page of results.
        :type MaxResults: integer
        :param MaxResults:
            The maximum number of items to return for this request.  The
            request returns a token that you can specify in a subsequent
            call to get the next set of results.
        :type WaiterConfig: dict
        :param WaiterConfig:
            A dictionary that provides parameters to control waiting behavior.
            - **Delay** *(integer) --* The amount of time in seconds to wait
              between attempts.  Default: 20
            - **MaxAttempts** *(integer) --* The maximum number of attempts
              to be made.  Default: 10
        :returns: None
        """
        # Stub: the real implementation is generated by botocore at runtime.
        pass
class PasswordDataAvailable(Waiter):
    def wait(self, InstanceId: str, DryRun: bool = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.get_password_data` until the password data
        for the given Windows instance is available.

        Polls every 15 seconds until a successful state is reached; an error
        is returned after 40 failed checks.  Both values can be overridden
        through ``WaiterConfig``.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/GetPasswordData>`_

        :type InstanceId: string
        :param InstanceId: **[REQUIRED]**
            The ID of the Windows instance.
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request, and provides an error
            response.  If you have the required permissions, the error
            response is ``DryRunOperation``.  Otherwise, it is
            ``UnauthorizedOperation``.
        :type WaiterConfig: dict
        :param WaiterConfig:
            A dictionary that provides parameters to control waiting behavior.
            - **Delay** *(integer) --* The amount of time in seconds to wait
              between attempts.  Default: 15
            - **MaxAttempts** *(integer) --* The maximum number of attempts
              to be made.  Default: 40
        :returns: None
        """
        # Stub: the real implementation is generated by botocore at runtime.
        pass
class SnapshotCompleted(Waiter):
    def wait(self, Filters: List = None, MaxResults: int = None, NextToken: str = None, OwnerIds: List = None, RestorableByUserIds: List = None, SnapshotIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_snapshots` until the specified
        snapshot(s) are completed.

        Polls every 15 seconds until a successful state is reached; an error
        is returned after 40 failed checks.  Both values can be overridden
        through ``WaiterConfig``.

        See also: `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeSnapshots>`_

        :type Filters: list
        :param Filters:
            The filters.  Each filter is a dict with a ``Name`` *(string)*
            and ``Values`` *(list of strings)*; filter names and values are
            case-sensitive.  Supported filters include ``description``,
            ``encrypted`` (``true`` | ``false``), ``owner-alias``
            (``amazon`` | ``self`` | ``all`` | ``aws-marketplace`` |
            ``microsoft``), ``owner-id``, ``progress``, ``snapshot-id``,
            ``start-time``, ``status`` (``pending`` | ``completed`` |
            ``error``), ``tag``:<key>, ``tag-key``, ``volume-id``, and
            ``volume-size``.
        :type MaxResults: integer
        :param MaxResults:
            The maximum number of snapshot results returned by
            ``DescribeSnapshots`` in paginated output (between 5 and 1000;
            values above 1000 return only 1000 results).  When used, only
            ``MaxResults`` results are returned per page along with a
            ``NextToken`` response element.  You cannot specify this
            parameter and the snapshot IDs parameter in the same request.
        :type NextToken: string
        :param NextToken:
            The ``NextToken`` value returned from a previous paginated
            ``DescribeSnapshots`` request; pagination continues from the end
            of the previous results.  ``null`` when there are no more
            results to return.
        :type OwnerIds: list
        :param OwnerIds:
            Describes the snapshots owned by these owners.
            - *(string) --*
        :type RestorableByUserIds: list
        :param RestorableByUserIds:
            The IDs of the AWS accounts that can create volumes from the
            snapshot.
            - *(string) --*
        :type SnapshotIds: list
        :param SnapshotIds:
            The snapshot IDs.
            Default: Describes the snapshots for which you have create
            volume permissions.
            - *(string) --*
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request, and provides an error
            response.  If you have the required permissions, the error
            response is ``DryRunOperation``.  Otherwise, it is
            ``UnauthorizedOperation``.
        :type WaiterConfig: dict
        :param WaiterConfig:
            A dictionary that provides parameters to control waiting behavior.
            - **Delay** *(integer) --* The amount of time in seconds to wait
              between attempts.  Default: 15
            - **MaxAttempts** *(integer) --* The maximum number of attempts
              to be made.  Default: 40
        :returns: None
        """
        # Stub: the real implementation is generated by botocore at runtime.
        pass
class SpotInstanceRequestFulfilled(Waiter):
    """Waiter that blocks until the described Spot Instance requests are fulfilled."""

    def wait(self, Filters: List = None, DryRun: bool = None, SpotInstanceRequestIds: List = None, NextToken: str = None, MaxResults: int = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_spot_instance_requests` until a
        successful state is reached.

        By default the request is retried every 15 seconds and an error is
        returned after 40 failed checks; both values can be overridden through
        ``WaiterConfig``.

        See also the AWS API documentation:
        https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeSpotInstanceRequests

        :type Filters: list
        :param Filters: One or more filters, each a dict with ``Name`` (string)
            and ``Values`` (list of strings). Filter names and values are
            case-sensitive. Supported names include, among others:
            ``availability-zone-group``, ``create-time``, ``fault-code``,
            ``fault-message``, ``instance-id``, ``launch-group``,
            ``launch.block-device-mapping.*`` (``delete-on-termination``,
            ``device-name``, ``snapshot-id``, ``volume-size``, ``volume-type``),
            ``launch.group-id``, ``launch.group-name``, ``launch.image-id``,
            ``launch.instance-type``, ``launch.kernel-id``, ``launch.key-name``,
            ``launch.monitoring-enabled``, ``launch.ramdisk-id``,
            ``launched-availability-zone``, ``network-interface.*``
            (``addresses.primary``, ``delete-on-termination``, ``description``,
            ``device-index``, ``group-id``, ``network-interface-id``,
            ``private-ip-address``, ``subnet-id``), ``product-description``
            (``Linux/UNIX`` | ``Windows``), ``spot-instance-request-id``,
            ``spot-price``, ``state`` (``open`` | ``active`` | ``closed`` |
            ``cancelled`` | ``failed``), ``status-code``, ``status-message``,
            ``tag:<key>``, ``tag-key``, ``type`` (``one-time`` |
            ``persistent``), ``valid-from``, and ``valid-until``. For spot
            request status details see
            https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-bid-status.html
        :type DryRun: boolean
        :param DryRun: Checks whether you have the required permissions
            without actually making the request. With the required permissions
            the error response is ``DryRunOperation``; otherwise it is
            ``UnauthorizedOperation``.
        :type SpotInstanceRequestIds: list
        :param SpotInstanceRequestIds: One or more Spot Instance request IDs
            (list of strings).
        :type NextToken: string
        :param NextToken: The token to request the next set of results. This
            value is ``null`` when there are no more results to return.
        :type MaxResults: integer
        :param MaxResults: The maximum number of results to return in a single
            call (between 5 and 1000). To retrieve the remaining results, make
            another call with the returned ``NextToken`` value.
        :type WaiterConfig: dict
        :param WaiterConfig: Controls waiting behavior: ``Delay`` (integer,
            seconds between attempts, default 15) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class SubnetAvailable(Waiter):
    """Waiter that blocks until the described subnets are in the ``available`` state."""

    def wait(self, Filters: List = None, SubnetIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_subnets` until a successful state is
        reached.

        By default the request is retried every 15 seconds and an error is
        returned after 40 failed checks; both values can be overridden through
        ``WaiterConfig``.

        See also the AWS API documentation:
        https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeSubnets

        :type Filters: list
        :param Filters: One or more filters, each a dict with ``Name`` (string)
            and ``Values`` (list of strings). Filter names and values are
            case-sensitive. Supported names include: ``availability-zone``
            (alias ``availabilityZone``), ``availability-zone-id`` (alias
            ``availabilityZoneId``), ``available-ip-address-count``,
            ``cidr-block`` (must exactly match the subnet's IPv4 CIDR block;
            aliases ``cidr`` and ``cidrBlock``), ``default-for-az`` (alias
            ``defaultForAz``), ``ipv6-cidr-block-association.ipv6-cidr-block``,
            ``ipv6-cidr-block-association.association-id``,
            ``ipv6-cidr-block-association.state``, ``owner-id``, ``state``
            (``pending`` | ``available``), ``subnet-arn``, ``subnet-id``,
            ``tag:<key>``, ``tag-key``, and ``vpc-id``.
        :type SubnetIds: list
        :param SubnetIds: One or more subnet IDs (list of strings). Default:
            describes all your subnets.
        :type DryRun: boolean
        :param DryRun: Checks whether you have the required permissions
            without actually making the request. With the required permissions
            the error response is ``DryRunOperation``; otherwise it is
            ``UnauthorizedOperation``.
        :type WaiterConfig: dict
        :param WaiterConfig: Controls waiting behavior: ``Delay`` (integer,
            seconds between attempts, default 15) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class SystemStatusOk(Waiter):
    """Waiter that blocks until the system status of the described instances is ``ok``."""

    def wait(self, Filters: List = None, InstanceIds: List = None, MaxResults: int = None, NextToken: str = None, DryRun: bool = None, IncludeAllInstances: bool = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_instance_status` until a successful
        state is reached.

        By default the request is retried every 15 seconds and an error is
        returned after 40 failed checks; both values can be overridden through
        ``WaiterConfig``.

        See also the AWS API documentation:
        https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeInstanceStatus

        :type Filters: list
        :param Filters: Filters, each a dict with ``Name`` (string) and
            ``Values`` (list of strings). Filter names and values are
            case-sensitive. Supported names include: ``availability-zone``,
            ``event.code`` (``instance-reboot`` | ``system-reboot`` |
            ``system-maintenance`` | ``instance-retirement`` |
            ``instance-stop``), ``event.description``,
            ``event.instance-event-id``, ``event.not-after``,
            ``event.not-before``, ``event.not-before-deadline``,
            ``instance-state-code`` (16-bit unsigned integer; low byte is the
            state: 0 pending, 16 running, 32 shutting-down, 48 terminated,
            64 stopping, 80 stopped; high byte is internal and should be
            ignored), ``instance-state-name`` (``pending`` | ``running`` |
            ``shutting-down`` | ``terminated`` | ``stopping`` | ``stopped``),
            ``instance-status.reachability`` and ``system-status.reachability``
            (``passed`` | ``failed`` | ``initializing`` |
            ``insufficient-data``), ``instance-status.status`` and
            ``system-status.status`` (``ok`` | ``impaired`` | ``initializing``
            | ``insufficient-data`` | ``not-applicable``).
        :type InstanceIds: list
        :param InstanceIds: The instance IDs (list of strings). Default:
            describes all your instances. Constraints: maximum 100 explicitly
            specified instance IDs.
        :type MaxResults: integer
        :param MaxResults: The maximum number of results to return in a single
            call (between 5 and 1000). To retrieve the remaining results, make
            another call with the returned ``NextToken`` value. You cannot
            specify this parameter and the instance IDs parameter in the same
            call.
        :type NextToken: string
        :param NextToken: The token to retrieve the next page of results.
        :type DryRun: boolean
        :param DryRun: Checks whether you have the required permissions
            without actually making the request. With the required permissions
            the error response is ``DryRunOperation``; otherwise it is
            ``UnauthorizedOperation``.
        :type IncludeAllInstances: boolean
        :param IncludeAllInstances: When ``true``, includes the health status
            for all instances; when ``false``, only for running instances.
            Default: ``false``.
        :type WaiterConfig: dict
        :param WaiterConfig: Controls waiting behavior: ``Delay`` (integer,
            seconds between attempts, default 15) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class VolumeAvailable(Waiter):
    """Waiter that blocks until the described volumes are in the ``available`` state."""

    def wait(self, Filters: List = None, VolumeIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_volumes` until a successful state is
        reached.

        By default the request is retried every 15 seconds and an error is
        returned after 40 failed checks; both values can be overridden through
        ``WaiterConfig``.

        See also the AWS API documentation:
        https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVolumes

        :type Filters: list
        :param Filters: Filters, each a dict with ``Name`` (string) and
            ``Values`` (list of strings). Filter names and values are
            case-sensitive. Supported names include: ``attachment.attach-time``,
            ``attachment.delete-on-termination``, ``attachment.device``,
            ``attachment.instance-id``, ``attachment.status`` (``attaching`` |
            ``attached`` | ``detaching``), ``availability-zone``,
            ``create-time``, ``encrypted`` (``true`` | ``false``), ``size``
            (GiB), ``snapshot-id``, ``status`` (``creating`` | ``available`` |
            ``in-use`` | ``deleting`` | ``deleted`` | ``error``),
            ``tag:<key>``, ``tag-key``, ``volume-id``, and ``volume-type``
            (``gp2`` General Purpose SSD, ``io1`` Provisioned IOPS SSD,
            ``st1`` Throughput Optimized HDD, ``sc1`` Cold HDD, or
            ``standard`` Magnetic).
        :type VolumeIds: list
        :param VolumeIds: The volume IDs (list of strings).
        :type DryRun: boolean
        :param DryRun: Checks whether you have the required permissions
            without actually making the request. With the required permissions
            the error response is ``DryRunOperation``; otherwise it is
            ``UnauthorizedOperation``.
        :type MaxResults: integer
        :param MaxResults: The maximum number of volume results returned per
            paginated page (between 5 and 500; values larger than 500 are
            capped at 500). When used, the response includes a ``NextToken``
            element for retrieving the remaining results. If omitted, all
            results are returned. You cannot specify this parameter and the
            volume IDs parameter in the same request.
        :type NextToken: string
        :param NextToken: The ``NextToken`` value returned from a previous
            paginated request; pagination continues from the end of those
            results. This value is ``null`` when there are no more results to
            return.
        :type WaiterConfig: dict
        :param WaiterConfig: Controls waiting behavior: ``Delay`` (integer,
            seconds between attempts, default 15) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class VolumeDeleted(Waiter):
    """Waiter that blocks until the described volumes are deleted."""

    def wait(self, Filters: List = None, VolumeIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_volumes` until a successful state is
        reached.

        By default the request is retried every 15 seconds and an error is
        returned after 40 failed checks; both values can be overridden through
        ``WaiterConfig``.

        See also the AWS API documentation:
        https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVolumes

        :type Filters: list
        :param Filters: Filters, each a dict with ``Name`` (string) and
            ``Values`` (list of strings). Filter names and values are
            case-sensitive. Supported names include: ``attachment.attach-time``,
            ``attachment.delete-on-termination``, ``attachment.device``,
            ``attachment.instance-id``, ``attachment.status`` (``attaching`` |
            ``attached`` | ``detaching``), ``availability-zone``,
            ``create-time``, ``encrypted`` (``true`` | ``false``), ``size``
            (GiB), ``snapshot-id``, ``status`` (``creating`` | ``available`` |
            ``in-use`` | ``deleting`` | ``deleted`` | ``error``),
            ``tag:<key>``, ``tag-key``, ``volume-id``, and ``volume-type``
            (``gp2`` General Purpose SSD, ``io1`` Provisioned IOPS SSD,
            ``st1`` Throughput Optimized HDD, ``sc1`` Cold HDD, or
            ``standard`` Magnetic).
        :type VolumeIds: list
        :param VolumeIds: The volume IDs (list of strings).
        :type DryRun: boolean
        :param DryRun: Checks whether you have the required permissions
            without actually making the request. With the required permissions
            the error response is ``DryRunOperation``; otherwise it is
            ``UnauthorizedOperation``.
        :type MaxResults: integer
        :param MaxResults: The maximum number of volume results returned per
            paginated page (between 5 and 500; values larger than 500 are
            capped at 500). When used, the response includes a ``NextToken``
            element for retrieving the remaining results. If omitted, all
            results are returned. You cannot specify this parameter and the
            volume IDs parameter in the same request.
        :type NextToken: string
        :param NextToken: The ``NextToken`` value returned from a previous
            paginated request; pagination continues from the end of those
            results. This value is ``null`` when there are no more results to
            return.
        :type WaiterConfig: dict
        :param WaiterConfig: Controls waiting behavior: ``Delay`` (integer,
            seconds between attempts, default 15) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class VolumeInUse(Waiter):
    """Waiter that blocks until the described volumes are in the ``in-use`` state."""

    def wait(self, Filters: List = None, VolumeIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`EC2.Client.describe_volumes` until a successful state is
        reached.

        By default the request is retried every 15 seconds and an error is
        returned after 40 failed checks; both values can be overridden through
        ``WaiterConfig``.

        See also the AWS API documentation:
        https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVolumes

        :type Filters: list
        :param Filters: Filters, each a dict with ``Name`` (string) and
            ``Values`` (list of strings). Filter names and values are
            case-sensitive. Supported names include: ``attachment.attach-time``,
            ``attachment.delete-on-termination``, ``attachment.device``,
            ``attachment.instance-id``, ``attachment.status`` (``attaching`` |
            ``attached`` | ``detaching``), ``availability-zone``,
            ``create-time``, ``encrypted`` (``true`` | ``false``), ``size``
            (GiB), ``snapshot-id``, ``status`` (``creating`` | ``available`` |
            ``in-use`` | ``deleting`` | ``deleted`` | ``error``),
            ``tag:<key>``, ``tag-key``, ``volume-id``, and ``volume-type``
            (``gp2`` General Purpose SSD, ``io1`` Provisioned IOPS SSD,
            ``st1`` Throughput Optimized HDD, ``sc1`` Cold HDD, or
            ``standard`` Magnetic).
        :type VolumeIds: list
        :param VolumeIds: The volume IDs (list of strings).
        :type DryRun: boolean
        :param DryRun: Checks whether you have the required permissions
            without actually making the request. With the required permissions
            the error response is ``DryRunOperation``; otherwise it is
            ``UnauthorizedOperation``.
        :type MaxResults: integer
        :param MaxResults: The maximum number of volume results returned per
            paginated page (between 5 and 500; values larger than 500 are
            capped at 500). When used, the response includes a ``NextToken``
            element for retrieving the remaining results. If omitted, all
            results are returned. You cannot specify this parameter and the
            volume IDs parameter in the same request.
        :type NextToken: string
        :param NextToken: The ``NextToken`` value returned from a previous
            paginated request; pagination continues from the end of those
            results. This value is ``null`` when there are no more results to
            return.
        :type WaiterConfig: dict
        :param WaiterConfig: Controls waiting behavior: ``Delay`` (integer,
            seconds between attempts, default 15) and ``MaxAttempts``
            (integer, default 40).
        :returns: None
        """
        pass
class VpcAvailable(Waiter):
    def wait(self, Filters: List = None, VpcIds: List = None, DryRun: bool = None, NextToken: str = None, MaxResults: int = None, WaiterConfig: Dict = None):
        """
        Polls :py:meth:`EC2.Client.describe_vpcs` every 15 seconds until a
        successful state is reached. An error is returned after 40 failed
        checks.

        See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVpcs>`_

        :type Filters: list
        :param Filters:
            One or more filters, each a dict with a ``Name`` (string) and
            ``Values`` (list of strings); both are case-sensitive.  Supported
            filter names include ``cidr``,
            ``cidr-block-association.cidr-block``,
            ``cidr-block-association.association-id``,
            ``cidr-block-association.state``, ``dhcp-options-id``, the
            ``ipv6-cidr-block-association.*`` variants, ``isDefault``,
            ``owner-id``, ``state`` (``pending`` | ``available``),
            ``tag``:<key>, ``tag-key`` and ``vpc-id``.
        :type VpcIds: list
        :param VpcIds:
            One or more VPC IDs.  Default: describes all your VPCs.
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request.  With permissions the error
            response is ``DryRunOperation``; otherwise it is
            ``UnauthorizedOperation``.
        :type NextToken: string
        :param NextToken:
            The token for the next page of results.
        :type MaxResults: integer
        :param MaxResults:
            The maximum number of results to return with a single call.  To
            retrieve the remaining results, make another call with the
            returned ``nextToken`` value.
        :type WaiterConfig: dict
        :param WaiterConfig:
            Controls waiting behavior: ``Delay`` (seconds between attempts,
            default 15) and ``MaxAttempts`` (default 40), both integers.
        :returns: None
        """
        pass
class VpcExists(Waiter):
    def wait(self, Filters: List = None, VpcIds: List = None, DryRun: bool = None, NextToken: str = None, MaxResults: int = None, WaiterConfig: Dict = None):
        """
        Polls :py:meth:`EC2.Client.describe_vpcs` every 1 seconds until a
        successful state is reached. An error is returned after 5 failed
        checks.

        See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVpcs>`_

        :type Filters: list
        :param Filters:
            One or more filters, each a dict with a ``Name`` (string) and
            ``Values`` (list of strings); both are case-sensitive.  Supported
            filter names include ``cidr``,
            ``cidr-block-association.cidr-block``,
            ``cidr-block-association.association-id``,
            ``cidr-block-association.state``, ``dhcp-options-id``, the
            ``ipv6-cidr-block-association.*`` variants, ``isDefault``,
            ``owner-id``, ``state`` (``pending`` | ``available``),
            ``tag``:<key>, ``tag-key`` and ``vpc-id``.
        :type VpcIds: list
        :param VpcIds:
            One or more VPC IDs.  Default: describes all your VPCs.
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request.  With permissions the error
            response is ``DryRunOperation``; otherwise it is
            ``UnauthorizedOperation``.
        :type NextToken: string
        :param NextToken:
            The token for the next page of results.
        :type MaxResults: integer
        :param MaxResults:
            The maximum number of results to return with a single call.  To
            retrieve the remaining results, make another call with the
            returned ``nextToken`` value.
        :type WaiterConfig: dict
        :param WaiterConfig:
            Controls waiting behavior: ``Delay`` (seconds between attempts,
            default 1) and ``MaxAttempts`` (default 5), both integers.
        :returns: None
        """
        pass
class VpcPeeringConnectionDeleted(Waiter):
    def wait(self, Filters: List = None, DryRun: bool = None, VpcPeeringConnectionIds: List = None, NextToken: str = None, MaxResults: int = None, WaiterConfig: Dict = None):
        """
        Polls :py:meth:`EC2.Client.describe_vpc_peering_connections` every 15
        seconds until a successful state is reached. An error is returned
        after 40 failed checks.

        See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVpcPeeringConnections>`_

        :type Filters: list
        :param Filters:
            One or more filters, each a dict with a ``Name`` (string) and
            ``Values`` (list of strings); both are case-sensitive.  Supported
            filter names include ``accepter-vpc-info.cidr-block``,
            ``accepter-vpc-info.owner-id``, ``accepter-vpc-info.vpc-id``,
            ``expiration-time``, ``requester-vpc-info.cidr-block``,
            ``requester-vpc-info.owner-id``, ``requester-vpc-info.vpc-id``,
            ``status-code`` (``pending-acceptance`` | ``failed`` |
            ``expired`` | ``provisioning`` | ``active`` | ``deleting`` |
            ``deleted`` | ``rejected``), ``status-message``, ``tag``:<key>,
            ``tag-key`` and ``vpc-peering-connection-id``.
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request.  With permissions the error
            response is ``DryRunOperation``; otherwise it is
            ``UnauthorizedOperation``.
        :type VpcPeeringConnectionIds: list
        :param VpcPeeringConnectionIds:
            One or more VPC peering connection IDs.  Default: describes all
            your VPC peering connections.
        :type NextToken: string
        :param NextToken:
            The token for the next page of results.
        :type MaxResults: integer
        :param MaxResults:
            The maximum number of results to return with a single call.  To
            retrieve the remaining results, make another call with the
            returned ``nextToken`` value.
        :type WaiterConfig: dict
        :param WaiterConfig:
            Controls waiting behavior: ``Delay`` (seconds between attempts,
            default 15) and ``MaxAttempts`` (default 40), both integers.
        :returns: None
        """
        pass
class VpcPeeringConnectionExists(Waiter):
    def wait(self, Filters: List = None, DryRun: bool = None, VpcPeeringConnectionIds: List = None, NextToken: str = None, MaxResults: int = None, WaiterConfig: Dict = None):
        """
        Polls :py:meth:`EC2.Client.describe_vpc_peering_connections` every 15
        seconds until a successful state is reached. An error is returned
        after 40 failed checks.

        See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVpcPeeringConnections>`_

        :type Filters: list
        :param Filters:
            One or more filters, each a dict with a ``Name`` (string) and
            ``Values`` (list of strings); both are case-sensitive.  Supported
            filter names include ``accepter-vpc-info.cidr-block``,
            ``accepter-vpc-info.owner-id``, ``accepter-vpc-info.vpc-id``,
            ``expiration-time``, ``requester-vpc-info.cidr-block``,
            ``requester-vpc-info.owner-id``, ``requester-vpc-info.vpc-id``,
            ``status-code`` (``pending-acceptance`` | ``failed`` |
            ``expired`` | ``provisioning`` | ``active`` | ``deleting`` |
            ``deleted`` | ``rejected``), ``status-message``, ``tag``:<key>,
            ``tag-key`` and ``vpc-peering-connection-id``.
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request.  With permissions the error
            response is ``DryRunOperation``; otherwise it is
            ``UnauthorizedOperation``.
        :type VpcPeeringConnectionIds: list
        :param VpcPeeringConnectionIds:
            One or more VPC peering connection IDs.  Default: describes all
            your VPC peering connections.
        :type NextToken: string
        :param NextToken:
            The token for the next page of results.
        :type MaxResults: integer
        :param MaxResults:
            The maximum number of results to return with a single call.  To
            retrieve the remaining results, make another call with the
            returned ``nextToken`` value.
        :type WaiterConfig: dict
        :param WaiterConfig:
            Controls waiting behavior: ``Delay`` (seconds between attempts,
            default 15) and ``MaxAttempts`` (default 40), both integers.
        :returns: None
        """
        pass
class VpnConnectionAvailable(Waiter):
    def wait(self, Filters: List = None, VpnConnectionIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        """
        Polls :py:meth:`EC2.Client.describe_vpn_connections` every 15 seconds
        until a successful state is reached. An error is returned after 40
        failed checks.

        See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVpnConnections>`_

        :type Filters: list
        :param Filters:
            One or more filters, each a dict with a ``Name`` (string) and
            ``Values`` (list of strings); both are case-sensitive.  Supported
            filter names include ``customer-gateway-configuration``,
            ``customer-gateway-id``, ``state`` (``pending`` | ``available``
            | ``deleting`` | ``deleted``), ``option.static-routes-only``,
            ``route.destination-cidr-block``, ``bgp-asn``, ``tag``:<key>,
            ``tag-key``, ``type`` (currently only ``ipsec.1``),
            ``vpn-connection-id`` and ``vpn-gateway-id``.
        :type VpnConnectionIds: list
        :param VpnConnectionIds:
            One or more VPN connection IDs.  Default: describes your VPN
            connections.
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request.  With permissions the error
            response is ``DryRunOperation``; otherwise it is
            ``UnauthorizedOperation``.
        :type WaiterConfig: dict
        :param WaiterConfig:
            Controls waiting behavior: ``Delay`` (seconds between attempts,
            default 15) and ``MaxAttempts`` (default 40), both integers.
        :returns: None
        """
        pass
class VpnConnectionDeleted(Waiter):
    def wait(self, Filters: List = None, VpnConnectionIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        """
        Polls :py:meth:`EC2.Client.describe_vpn_connections` every 15 seconds
        until a successful state is reached. An error is returned after 40
        failed checks.

        See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVpnConnections>`_

        :type Filters: list
        :param Filters:
            One or more filters, each a dict with a ``Name`` (string) and
            ``Values`` (list of strings); both are case-sensitive.  Supported
            filter names include ``customer-gateway-configuration``,
            ``customer-gateway-id``, ``state`` (``pending`` | ``available``
            | ``deleting`` | ``deleted``), ``option.static-routes-only``,
            ``route.destination-cidr-block``, ``bgp-asn``, ``tag``:<key>,
            ``tag-key``, ``type`` (currently only ``ipsec.1``),
            ``vpn-connection-id`` and ``vpn-gateway-id``.
        :type VpnConnectionIds: list
        :param VpnConnectionIds:
            One or more VPN connection IDs.  Default: describes your VPN
            connections.
        :type DryRun: boolean
        :param DryRun:
            Checks whether you have the required permissions for the action,
            without actually making the request.  With permissions the error
            response is ``DryRunOperation``; otherwise it is
            ``UnauthorizedOperation``.
        :type WaiterConfig: dict
        :param WaiterConfig:
            Controls waiting behavior: ``Delay`` (seconds between attempts,
            default 15) and ``MaxAttempts`` (default 40), both integers.
        :returns: None
        """
        pass
| 65.198527
| 685
| 0.614409
| 20,733
| 177,014
| 5.241306
| 0.031206
| 0.017761
| 0.009083
| 0.010104
| 0.952691
| 0.946378
| 0.940682
| 0.934397
| 0.930016
| 0.92639
| 0
| 0.010867
| 0.288339
| 177,014
| 2,714
| 686
| 65.22255
| 0.851755
| 0.848656
| 0
| 0.552083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.322917
| false
| 0.333333
| 0.03125
| 0
| 0.677083
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
d8bd496a71078f6eb07a5a8dc2660d3f57aef9e0
| 74
|
py
|
Python
|
exaslct_src/cli/__init__.py
|
mace84/script-languages
|
d586cbe212bbb4efbfb39e095183729c65489360
|
[
"MIT"
] | null | null | null |
exaslct_src/cli/__init__.py
|
mace84/script-languages
|
d586cbe212bbb4efbfb39e095183729c65489360
|
[
"MIT"
] | 1
|
2019-05-06T07:36:11.000Z
|
2019-05-06T07:36:11.000Z
|
exaslct_src/cli/__init__.py
|
mace84/script-languages
|
d586cbe212bbb4efbfb39e095183729c65489360
|
[
"MIT"
] | 1
|
2019-05-03T08:49:29.000Z
|
2019-05-03T08:49:29.000Z
|
from exaslct_src.cli.cli import cli
from exaslct_src.cli.commands import *
| 37
| 38
| 0.837838
| 13
| 74
| 4.615385
| 0.461538
| 0.366667
| 0.466667
| 0.566667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094595
| 74
| 2
| 38
| 37
| 0.895522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d8cc1b065bde7ca304aaebe561e590814f95f2e7
| 29,536
|
py
|
Python
|
mutation_loc_figures.py
|
martynaut/mirnome-mutations
|
2bcee735a3620a0ae6fc91a57500f19b8851ff60
|
[
"MIT"
] | null | null | null |
mutation_loc_figures.py
|
martynaut/mirnome-mutations
|
2bcee735a3620a0ae6fc91a57500f19b8851ff60
|
[
"MIT"
] | 1
|
2021-09-10T12:07:49.000Z
|
2021-09-24T07:04:02.000Z
|
mutation_loc_figures.py
|
martynaut/mirnome-mutations
|
2bcee735a3620a0ae6fc91a57500f19b8851ff60
|
[
"MIT"
] | null | null | null |
import os
import pandas as pd
import matplotlib
import click
from matplotlib import rc
from matplotlib import lines
# matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
import gc
# Keep text in SVG output as real <text> elements (editable) instead of paths.
plt.rcParams['svg.fonttype'] = 'none'
# Pri-miRNA hairpin background image that all figures are drawn on top of.
image_path = 'reference_files/primirna_background.tiff'
im = plt.imread(image_path)
# Font-size presets applied globally to every figure produced by this module.
SMALL_SIZE = 18
MEDIUM_SIZE = 20
BIGGER_SIZE = 22
rc('font', size=MEDIUM_SIZE)  # controls default text sizes
rc('axes', titlesize=MEDIUM_SIZE)  # fontsize of the axes title
rc('axes', labelsize=MEDIUM_SIZE)  # fontsize of the x and y labels
rc('xtick', labelsize=MEDIUM_SIZE)  # fontsize of the tick labels
rc('ytick', labelsize=MEDIUM_SIZE)  # fontsize of the tick labels
rc('legend', fontsize=SMALL_SIZE)  # legend fontsize
rc('figure', titlesize=BIGGER_SIZE)  # fontsize of the figure title
def create_plot(data_df, output_name, mutations=0, genes=0, mirna_type='both'):
    """Render per-position mutation counts over the pri-miRNA background image.

    Draws two mirrored bar plots on top of the module-level background ``im``:
    the 5p-arm counts on the upper axes and the 3p-arm counts on the lower
    axes (y-axis inverted so bars grow downward), then writes the figure to
    ``output_name`` as SVG.

    :param data_df: mutation table consumed by ``prepare_data_5p`` /
        ``prepare_data_3p``; assumed to carry at least ``arm``, ``type``,
        ``from_start``, ``from_end`` and ``pos`` columns — TODO confirm
        against callers.
    :param output_name: path of the SVG file to write.
    :param mutations: total mutation count printed in the top-right corner.
    :param genes: total gene count printed below the mutation count.
    :param mirna_type: ``'5p'``, ``'3p'`` or ``'both'``; controls which arm's
        seed region is annotated and highlighted.  The opposite arm's bars are
        recolored through the ``silent-*`` palette entries.
    """
    df_plot_5, loop_value = prepare_data_5p(data_df)
    df_plot_3 = prepare_data_3p(data_df)
    # Common y-scale so both arm plots are directly comparable.
    max_value = max(df_plot_5['pos'].max(), df_plot_3['pos'].max())
    rc("pdf", fonttype=42)  # TrueType fonts so text stays editable in PDF
    sns.set_style(style='white')
    # Bar color per region type; the 'silent-*' keys give the non-highlighted
    # arm's seed the plain arm color (#5481A6) instead of dark blue.
    palette = {'flanking-5': 'grey',
               'flanking-3': 'grey',
               'pre-seed': '#5481A6',
               'seed': 'darkblue',
               'post-seed': '#5481A6',
               'loop': 'grey',
               'silent-pre': '#5481A6',
               'silent-post': '#5481A6',
               'silent-seed': '#5481A6'
               }
    # Full-figure axes holding only the background image; all coordinates
    # below are fractions of this image's data limits.
    fig = plt.figure(figsize=(25, 10))
    ax = fig.add_axes([0, 0, 1, 1])
    ax.axis('off')
    plt.imshow(im)
    plt.xticks([])
    plt.yticks([])
    y_min, y_max = ax.get_ylim()
    x_min, x_max = ax.get_xlim()
    # Mutation / gene totals in the top-right corner, separated by a rule.
    plt.text(x_max * 0.92, y_min * 1, str(mutations), horizontalalignment='center',
             verticalalignment='center', fontdict={'size': '42'})
    plt.text(x_max * 0.92, y_min * 1.09, str(genes), horizontalalignment='center',
             verticalalignment='center', fontdict={'size': '42'})
    line = lines.Line2D([x_max * 0.89, x_max * 0.95], [y_min * 1.04, y_min * 1.04], lw=0.5, color='black')
    ax.add_line(line)
    line.set_clip_on(False)  # lines sit outside the axes, so disable clipping
    # Region captions and underline rules: flanking region / miRNA / loop.
    plt.text(x_max * 0.24, y_min * -0.09, 'flanking region', horizontalalignment='center', fontdict={'size': '42'})
    line = lines.Line2D([x_max * 0.02, x_max * 0.46], [y_min * -0.07, y_min * -0.07], lw=0.5, color='grey', alpha=0.8)
    ax.add_line(line)
    line.set_clip_on(False)
    plt.text(x_max * 0.65, y_min * -0.09, 'miRNA', horizontalalignment='center', fontdict={'size': '42'})
    line = lines.Line2D([x_max * 0.47, x_max * 0.83], [y_min * -0.07, y_min * -0.07], lw=0.5, color='#5481A6',
                        alpha=0.8)
    ax.add_line(line)
    line.set_clip_on(False)
    plt.text(x_max * 0.91, y_min * -0.09, 'loop', horizontalalignment='center', fontdict={'size': '42'})
    line = lines.Line2D([x_max * 0.84, x_max * 0.98], [y_min * -0.07, y_min * -0.07], lw=0.5, color='grey', alpha=0.8)
    ax.add_line(line)
    line.set_clip_on(False)
    # Seed annotation for the 5p arm (skipped when only the 3p arm is shown).
    if mirna_type != '3p':
        plt.text(x_max * 0.54, y_min * 0, 'seed', horizontalalignment='center', fontdict={'size': '42'})
        line = lines.Line2D([x_max * 0.49, x_max * 0.59], [y_min * 0.03, y_min * 0.03], lw=0.5, color='darkblue',
                            alpha=0.8)
        ax.add_line(line)
        line.set_clip_on(False)
    # Seed annotation for the 3p arm (skipped when only the 5p arm is shown).
    if mirna_type != '5p':
        line = lines.Line2D([x_max * 0.675, x_max * 0.775], [y_min * 1.125, y_min * 1.125], lw=0.5, color='darkblue',
                            alpha=0.8)
        ax.add_line(line)
        line.set_clip_on(False)
        plt.text(x_max * 0.725, y_min * 1.195, 'seed', horizontalalignment='center', fontdict={'size': '42'})
    # Central-loop mutations are not plotted as bars; report them as a note.
    if loop_value > 0:
        plt.text(
            x_max * 0.92, y_min * 0.6, '+ {} loop\nmutations'.format(loop_value),
            horizontalalignment='center',
            verticalalignment='center',
            fontdict={'size': '30'}
        )
    # --- Upper bar plot: 5p arm -------------------------------------------
    a = plt.axes([.058, .52, .82, .35])
    hue_order = ['flanking-5', 'pre-seed', 'seed', 'post-seed', 'loop']
    if mirna_type == '3p':
        # 5p arm is not the highlighted one: remap its region types to the
        # 'silent-*' palette entries so its seed is not emphasized.
        hue_order = ['flanking-5', 'silent-pre', 'silent-seed', 'silent-post', 'loop']
        df_plot_5['type'] = df_plot_5['type'].apply(
            lambda x: 'silent-seed' if x == 'seed' else (
                'silent-pre' if x == 'pre-seed' else (
                    'silent-post' if x == 'post-seed' else x
                )
            )
        )
    # Tick labels: -25..-1 (flanking), 1..22 (miRNA), +1..+3 (loop side).
    labels = [str(x) for x in range(-25, 0)] + \
             [str(x) for x in range(1, 23)] + ['+' + str(x) for x in range(1, 4)]
    # NOTE(review): labels only reach '+3', so no element equals '+4' and this
    # replacement is a no-op — confirm whether 'L' was meant for another label.
    labels = ['L' if x == '+4' else x for x in labels]
    ax = sns.barplot(x="from_start", y="pos", hue="type",
                     data=df_plot_5, dodge=False,
                     hue_order=hue_order,
                     palette=palette,
                     ax=a)
    for loc in ['right', 'top', 'left', 'bottom']:
        ax.spines[loc].set_visible(False)
    ax.set_xlabel('')
    ax.set_ylabel('')
    # Legend is configured here but removed further down; presumably kept only
    # for interactive inspection — left as-is.
    ax.legend(loc='upper right', ncol=2)
    plt.setp(ax.get_legend().get_texts(), fontsize='18')
    plt.setp(ax.get_legend().get_title(), fontsize='22')
    # NOTE(review): plt.grid's 'b' kwarg was renamed 'visible' in matplotlib
    # 3.5 and later removed; likewise tick.label below was removed in newer
    # matplotlib — confirm the pinned matplotlib version.
    plt.grid(b=True, which='major', axis='y', color='lightgrey',
             linestyle='-', linewidth=0.75, zorder=2, alpha=0.5)
    ax.set_xticklabels(labels)
    for tick in ax.xaxis.get_major_ticks():
        tick.label.set_fontsize(30)
    for tick in ax.yaxis.get_major_ticks():
        tick.label.set_fontsize(30)
    # At most ~4 y-ticks; add headroom above the tallest bar.
    if max_value > 5:
        plt.yticks(np.arange(0, max_value + 1, np.floor(max_value/3)))
        plot_limit = max_value + 2
    else:
        plt.yticks(np.arange(0, max_value + 1, 1))
        plot_limit = max_value + 1
    ax.set_ylim([0, plot_limit])
    ax.xaxis.tick_bottom()
    # Show only every fifth position label (plus '+1') to avoid clutter.
    for label in ax.get_xticklabels():
        if label.get_text() not in ['-25', '-20', '-15', '-10', '-5', '1', '5', '10', '15', '20',
                                    '+1']:
            label.set_visible(False)
    # Narrow each bar to a fixed width and recenter it on its tick.
    new_width = 0.35
    for patch in ax.patches:
        current_width = patch.get_width()
        diff = current_width - new_width
        # we change the bar width
        patch.set_width(new_width)
        # we recenter the bar
        patch.set_x(patch.get_x() + diff * .5)
    ax.tick_params(axis='both', which='major', pad=8, width=0.5)
    plt.setp(ax.patches, linewidth=0)
    ax.get_legend().remove()
    # --- Lower bar plot: 3p arm (mirrored, bars grow downward) ------------
    b = plt.axes([.025, .02, .82, .35], facecolor='w')
    hue_order = ['flanking-3', 'pre-seed', 'seed', 'post-seed', 'loop']
    # 3p labels run right-to-left: +25..+1 (flanking), 22..1 (miRNA), -1..-3.
    labels = ['+' + str(x) for x in range(1, 26)][::-1] + [str(x) for x in range(1, 23)][::-1] + \
             ['-' + str(x) for x in range(1, 4)]
    if mirna_type == '5p':
        # 3p arm is not the highlighted one: same 'silent-*' remapping.
        hue_order = ['flanking-3', 'silent-pre', 'silent-seed', 'silent-post', 'loop']
        df_plot_3['type'] = df_plot_3['type'].apply(
            lambda x: 'silent-seed' if x == 'seed' else (
                'silent-pre' if x == 'pre-seed' else(
                    'silent-post' if x == 'post-seed' else x
                )
            )
        )
    ax = sns.barplot(x="from_start", y="pos", hue="type",
                     data=df_plot_3, dodge=False,
                     hue_order=hue_order,
                     palette=palette,
                     ax=b)
    for loc in ['right', 'top', 'left', 'bottom']:
        ax.spines[loc].set_visible(False)
    ax.set_xlabel('')
    ax.set_ylabel('')
    plt.grid(b=True, which='major', axis='y', color='lightgrey',
             linestyle='-', linewidth=0.75, zorder=1, alpha=0.5)
    ax.set_xticklabels(labels)
    if max_value > 5:
        plt.yticks(np.arange(0, max_value + 1, np.floor(max_value/3)))
        plot_limit = max_value + 2
    else:
        plt.yticks(np.arange(0, max_value + 1, 1))
        plot_limit = max_value + 1
    ax.get_legend().remove()
    # Inverted y-axis so the 3p bars hang downward, mirroring the 5p plot.
    ax.set_ylim([plot_limit, 0])
    ax.xaxis.tick_top()
    for tick in ax.xaxis.get_major_ticks():
        tick.label2.set_fontsize(30)  # label2 = the top-side tick labels
    for tick in ax.yaxis.get_major_ticks():
        tick.label.set_fontsize(30)
    for label in ax.get_xticklabels():
        if label.get_text() not in ['+25', '+20', '+15', '+10', '+5', '+1', '1', '5', '10', '15', '20',
                                    '-5']:
            label.set_visible(False)
    new_width = 0.35
    for patch in ax.patches:
        current_width = patch.get_width()
        diff = current_width - new_width
        # we change the bar width
        patch.set_width(new_width)
        # we recenter the bar
        patch.set_x(patch.get_x() + diff * .5)
    plt.setp(ax.patches, linewidth=0)
    plt.savefig(output_name, format='svg', dpi=300, transparent=True, bbox_inches='tight')
    # Tear everything down: this function may be called in a loop, so release
    # figure memory explicitly and force a GC pass.
    plt.cla()
    plt.clf()
    plt.close("all")
    plt.close(fig)
    gc.collect()
def prepare_data_5p(df_temp):
    """Aggregate per-position mutation counts for the 5p arm of a miRNA hairpin.

    :param df_temp: DataFrame with at least the columns 'arm', 'type',
        'from_start', 'from_end' and 'pos' (one row per mutation).
    :return: tuple ``(dataframe, add_loop_value)`` where ``dataframe`` has
        columns ['arm', 'type', 'from_start', 'pos'] with one row per plotted
        position (zero-filled where no mutation occurred), and
        ``add_loop_value`` is the count of mutations deep inside the loop
        (>= 4 nt from the loop start and <= -4 nt from its end) that are
        reported as a single number instead of per position.
    """
    deep_loop = df_temp[(df_temp['arm'] == 'loop') &
                        ((df_temp['from_start'] >= 4) & (df_temp['from_end'] <= -4))]
    add_loop_value = deep_loop.shape[0]

    # Count mutations per (arm, type, position) for the 5p arm plus the first
    # three loop positions, which are shown on the 5p panel.
    dataframe = df_temp[(df_temp['arm'] == '5p') |
                        ((df_temp['arm'] == 'loop') & (df_temp['from_start'] < 4))].groupby(
        ['arm', 'type', 'from_start'],
        as_index=False)[['pos']].count()[['arm', 'type', 'from_start', 'pos']]

    # Shift every region onto one shared x-axis: flanking-5 occupies 1-25,
    # pre-seed 26, seed 27-33, post-seed 34-47 and the loop start 48-50.
    offsets = {('pre-seed', '5p'): 25,
               ('seed', '5p'): 26,
               ('post-seed', '5p'): 33,
               ('loop', 'loop'): 47}
    for (mut_type, arm), offset in offsets.items():
        mask = (dataframe['type'] == mut_type) & (dataframe['arm'] == arm)
        dataframe.loc[mask, 'from_start'] = dataframe.loc[mask, 'from_start'] + offset

    def _fill_missing(frame, arm, mut_type, positions):
        # Add zero-count rows so the bar plot has a bar slot at every position.
        missing = [pd.DataFrame([[arm, mut_type, x, 0]],
                                columns=['arm', 'type', 'from_start', 'pos'])
                   for x in positions
                   if frame[(frame['type'] == mut_type) &
                            (frame['from_start'] == x)].shape[0] == 0]
        return pd.concat([frame] + missing) if missing else frame

    dataframe = _fill_missing(dataframe, '5p', 'flanking-5', range(1, 26))
    dataframe = _fill_missing(dataframe, '5p', 'pre-seed', range(26, 27))
    dataframe = _fill_missing(dataframe, '5p', 'seed', range(27, 34))
    dataframe = _fill_missing(dataframe, '5p', 'post-seed', range(34, 48))
    # Long post-seed tails are clamped onto the last plotted position.
    dataframe.loc[(dataframe['type'] == 'post-seed') & (dataframe['from_start'] > 47), 'from_start'] = 47
    dataframe = _fill_missing(dataframe, 'loop', 'loop', range(48, 51))

    # Sum merges the clamped duplicates and the zero-filled rows.
    dataframe = dataframe.groupby(['arm', 'type', 'from_start'],
                                  as_index=False)[['pos']].sum()[['arm', 'type', 'from_start', 'pos']]
    return dataframe, add_loop_value
def prepare_data_3p(df_temp):
    """Aggregate per-position mutation counts for the 3p arm of a miRNA hairpin.

    :param df_temp: DataFrame with at least the columns 'arm', 'type',
        'from_start', 'from_end' and 'pos' (one row per mutation).
    :return: DataFrame with columns ['arm', 'type', 'from_start', 'pos'],
        one row per plotted position (zero-filled where no mutation
        occurred); 'from_start' is negated because the 3p panel is plotted
        right-to-left.
    """
    dataframe = df_temp[(df_temp['arm'] == '3p')].groupby(
        ['arm', 'type', 'from_start'],
        as_index=False)[['pos']].count()[['arm', 'type', 'from_start', 'pos']]

    # Loop mutations within 3 nt of the loop end are shown on the 3p panel;
    # re-index them by their distance from the loop end.
    # (The original wrapped this in a try/except KeyError whose handler
    # repeated the exact same statements; that dead duplication is removed —
    # a missing 'from_end' column raises the same KeyError either way.)
    dataframe_loop = df_temp[(df_temp['arm'] == 'loop') &
                             (df_temp['from_end'] > -4)].groupby(
        ['arm', 'type', 'from_end'],
        as_index=False)[['pos']].count()[['arm', 'type', 'from_end', 'pos']]
    dataframe_loop['from_start'] = dataframe_loop['from_end'].apply(
        lambda start: start + 4
    )
    dataframe_loop.drop('from_end', inplace=True, axis=1)
    dataframe = pd.concat([dataframe, dataframe_loop], sort=False)

    # Shift every 3p region onto one shared axis (before negation):
    # loop 1-3, pre-seed 4, seed 5-11, post-seed 12-25, flanking-3 26-50.
    offsets = {'post-seed': 11, 'seed': 4, 'pre-seed': 3, 'flanking-3': 25}
    for mut_type, offset in offsets.items():
        mask = (dataframe['type'] == mut_type) & (dataframe['arm'] == '3p')
        dataframe.loc[mask, 'from_start'] = dataframe.loc[mask, 'from_start'] + offset

    def _fill_missing(frame, arm, mut_type, positions):
        # Add zero-count rows so every plotted position gets a bar slot.
        missing = [pd.DataFrame([[arm, mut_type, x, 0]],
                                columns=['arm', 'type', 'from_start', 'pos'])
                   for x in positions
                   if frame[(frame['type'] == mut_type) &
                            (frame['from_start'] == x)].shape[0] == 0]
        return pd.concat([frame] + missing) if missing else frame

    dataframe = _fill_missing(dataframe, '3p', 'flanking-3', range(26, 51))
    dataframe = _fill_missing(dataframe, '3p', 'post-seed', range(12, 26))
    # Long post-seed tails are clamped onto the last plotted position.
    dataframe.loc[(dataframe['type'] == 'post-seed') & (dataframe['from_start'] > 25), 'from_start'] = 25
    dataframe = _fill_missing(dataframe, '3p', 'seed', range(5, 12))
    dataframe = _fill_missing(dataframe, '3p', 'pre-seed', range(4, 5))
    dataframe = _fill_missing(dataframe, 'loop', 'loop', range(1, 4))

    # 3p positions run right-to-left on the plot, hence the negation.
    dataframe['from_start'] = dataframe['from_start'].apply(lambda start: start * -1)
    # Sum merges the clamped duplicates and the zero-filled rows.
    dataframe = dataframe.groupby(['arm', 'type', 'from_start'],
                                  as_index=False)[['pos']].sum()[['arm', 'type', 'from_start', 'pos']]
    return dataframe
def prepare_figure(output_folder):
    """Draw the aggregate mutation-distribution plots for the whole dataset.

    Reads 'all_mutations_with_n_hgvs.csv' from ``output_folder`` and writes
    one SVG for all miRNAs plus one per arm-balance class (5p-dominant,
    3p-dominant, balanced) into ``output_folder``/plots.
    """
    plots_dir = output_folder + '/plots'
    if not os.path.exists(plots_dir):
        os.makedirs(plots_dir)
    df_temp = pd.read_csv(output_folder + '/all_mutations_with_n_hgvs.csv')
    # (balance filter, output file name, mirna_type passed to create_plot)
    plot_configs = [
        (None, 'plot_miRNA.svg', 'both'),            # all miRNAs
        ('5p', 'plot_5p_balance_miRNA.svg', '5p'),   # 5' dominant miRNAs
        ('3p', 'plot_3p_balance_miRNA.svg', '3p'),   # 3p dominant miRNAs
        ('both', 'plot_balanced_miRNA.svg', 'both'), # balanced miRNAs
    ]
    for balance, file_name, mirna_type in plot_configs:
        subset = df_temp if balance is None else df_temp[df_temp['balance'] == balance]
        create_plot(subset, plots_dir + '/' + file_name,
                    subset.shape[0], subset['pre_name'].nunique(),
                    mirna_type=mirna_type)
def create_plot_per_mirna(data_df, output_name, mutations=0, types='both', title1='title1', title='title'):
    """Render a two-panel mutation-distribution figure for one precursor miRNA.

    The top panel shows per-position counts along the 5p arm, the bottom
    panel (inverted y-axis) the 3p arm. The non-dominant arm of a 5p- or
    3p-dominant miRNA is recolored via the 'silent-*' palette entries.

    :param data_df: mutations of one precursor (rows with arm/type/from_start/
        from_end/pos columns, consumed by prepare_data_5p / prepare_data_3p).
    :param output_name: path of the SVG file to write.
    :param mutations: total mutation count printed on the figure.
    :param types: arm balance of this miRNA: '5p', '3p' or 'both'.
    :param title1: miRNA name (printed bold; 'mir' is upcased to 'miR').
    :param title: secondary title line.
    """
    df_plot_5, loop_value = prepare_data_5p(data_df)
    df_plot_3 = prepare_data_3p(data_df)
    # shared y-scale so the two panels are directly comparable
    max_value = max(df_plot_5['pos'].max(), df_plot_3['pos'].max())
    # fonttype 42 = TrueType, keeps text editable in vector output
    rc("pdf", fonttype=42)
    sns.set_style(style='white')
    palette = {'flanking-5': 'grey',
               'flanking-3': 'grey',
               'pre-seed': '#5481A6',
               'seed': 'darkblue',
               'post-seed': '#5481A6',
               'loop': 'grey',
               'silent-pre': '#5481A6',
               'silent-post': '#5481A6',
               'silent-seed': '#5481A6'
               }
    fig = plt.figure(figsize=(25, 10))
    ax = fig.add_axes([0, 0, 1, 1])
    ax.axis('off')
    # NOTE(review): `im` is not defined in this function — presumably a
    # module-level background image (hairpin schematic); confirm at file top.
    plt.imshow(im)
    plt.xticks([])
    plt.yticks([])
    y_min, y_max = ax.get_ylim()
    x_min, x_max = ax.get_xlim()
    # titles: negative y factors place the text above the image axes
    plt.text(0, y_min * -0.5, title1.replace('mir', 'miR'), horizontalalignment='left', weight='bold',
             verticalalignment='center', fontdict={'size': '38'})
    plt.text(0, y_min * -0.35, title, horizontalalignment='left',
             verticalalignment='center', fontdict={'size': '38'})
    # total mutation count, lower right
    plt.text(x_max * 0.92, y_min * 1, str(mutations), horizontalalignment='center',
             verticalalignment='center', fontdict={'size': '42'})
    # region ruler above the panels: flanking / miRNA / loop with underlines
    plt.text(x_max * 0.24, y_min * -0.09, 'flanking region', horizontalalignment='center', fontdict={'size': '42'})
    line = lines.Line2D([x_max * 0.02, x_max * 0.46], [y_min * -0.07, y_min * -0.07], lw=0.5, color='grey',
                        alpha=0.8)
    ax.add_line(line)
    line.set_clip_on(False)  # ruler lines sit outside the axes limits
    plt.text(x_max * 0.65, y_min * -0.09, 'miRNA', horizontalalignment='center', fontdict={'size': '42'})
    line = lines.Line2D([x_max * 0.47, x_max * 0.83], [y_min * -0.07, y_min * -0.07], lw=0.5, color='#5481A6',
                        alpha=0.8)
    ax.add_line(line)
    line.set_clip_on(False)
    plt.text(x_max * 0.91, y_min * -0.09, 'loop', horizontalalignment='center', fontdict={'size': '42'})
    line = lines.Line2D([x_max * 0.84, x_max * 0.98], [y_min * -0.07, y_min * -0.07], lw=0.5, color='grey',
                        alpha=0.8)
    ax.add_line(line)
    line.set_clip_on(False)
    # seed markers: above for the 5p arm, below for the 3p arm
    if types != '3p':
        plt.text(x_max * 0.54, y_min * 0, 'seed', horizontalalignment='center', fontdict={'size': '42'})
        line = lines.Line2D([x_max * 0.49, x_max * 0.59], [y_min * 0.03, y_min * 0.03], lw=0.5, color='darkblue',
                            alpha=0.8)
        ax.add_line(line)
        line.set_clip_on(False)
    if types != '5p':
        line = lines.Line2D([x_max * 0.675, x_max * 0.775], [y_min * 1.125, y_min * 1.125], lw=0.5,
                            color='darkblue',
                            alpha=0.8)
        ax.add_line(line)
        line.set_clip_on(False)
        plt.text(x_max * 0.725, y_min * 1.195, 'seed', horizontalalignment='center', fontdict={'size': '42'})
    # deep-loop mutations are not plotted per position; report the count
    if loop_value > 0:
        plt.text(
            x_max * 0.92, y_min * 0.6, '+ {} loop\nmutations'.format(loop_value),
            horizontalalignment='center',
            verticalalignment='center',
            fontdict={'size': '30'}
        )
    # --- top panel: 5p-arm distribution ---
    a = plt.axes([.058, .52, .82, .35])
    hue_order = ['flanking-5', 'pre-seed', 'seed', 'post-seed', 'loop']
    if types == '3p':
        # 3p-dominant miRNA: restyle ("silence") the 5p arm regions
        hue_order = ['flanking-5', 'silent-pre', 'silent-seed', 'silent-post', 'loop']
        df_plot_5['type'] = df_plot_5['type'].apply(
            lambda x: 'silent-seed' if x == 'seed' else (
                'silent-pre' if x == 'pre-seed' else (
                    'silent-post' if x == 'post-seed' else x
                )
            )
        )
    # tick labels: -25..-1 flanking, 1..22 miRNA positions, +1..+3 loop
    labels = [str(x) for x in range(-25, 0)] + \
             [str(x) for x in range(1, 23)] + ['+' + str(x) for x in range(1, 4)]
    # '+4' never occurs in labels (loop range stops at +3), so this is a no-op
    labels = ['L' if x == '+4' else x for x in labels]
    ax = sns.barplot(x="from_start", y="pos", hue="type",
                     data=df_plot_5, dodge=False,
                     hue_order=hue_order,
                     palette=palette,
                     ax=a)
    for loc in ['right', 'top', 'left', 'bottom']:
        ax.spines[loc].set_visible(False)
    ax.set_xlabel('')
    ax.set_ylabel('')
    ax.legend(loc='upper right', ncol=2)
    plt.setp(ax.get_legend().get_texts(), fontsize='18')
    plt.setp(ax.get_legend().get_title(), fontsize='22')
    # NOTE(review): `b=` was renamed `visible=` in matplotlib >= 3.5
    plt.grid(b=True, which='major', axis='y', color='lightgrey',
             linestyle='-', linewidth=0.75, zorder=2, alpha=0.5)
    ax.set_xticklabels(labels)
    # NOTE(review): Tick.label was deprecated in favor of label1 (matplotlib >= 3.1)
    for tick in ax.xaxis.get_major_ticks():
        tick.label.set_fontsize(30)
    for tick in ax.yaxis.get_major_ticks():
        tick.label.set_fontsize(30)
    if max_value > 5:
        # roughly three y ticks for tall panels
        plt.yticks(np.arange(0, max_value + 1, np.floor(max_value / 3)))
        plot_limit = max_value + 2
    else:
        plt.yticks(np.arange(0, max_value + 1, 1))
        plot_limit = max_value + 1
    ax.set_ylim([0, plot_limit])
    ax.xaxis.tick_bottom()
    # hide all but a sparse subset of the tick labels
    for label in ax.get_xticklabels():
        if label.get_text() not in ['-25', '-20', '-15', '-10', '-5', '1', '5', '10', '15', '20',
                                    '+1']:
            label.set_visible(False)
    new_width = 0.35
    for patch in ax.patches:
        current_width = patch.get_width()
        diff = current_width - new_width
        # we change the bar width
        patch.set_width(new_width)
        # we recenter the bar
        patch.set_x(patch.get_x() + diff * .5)
    ax.tick_params(axis='both', which='major', pad=8, width=0.5)
    plt.setp(ax.patches, linewidth=0)
    ax.get_legend().remove()
    # --- bottom panel: 3p-arm distribution (inverted y, tick labels on top) ---
    b = plt.axes([.025, .02, .82, .35], facecolor='w')
    hue_order = ['flanking-3', 'pre-seed', 'seed', 'post-seed', 'loop']
    # 3p panel runs right-to-left: +25..+1 flanking, 22..1 miRNA, -1..-3 loop
    labels = ['+' + str(x) for x in range(1, 26)][::-1] + [str(x) for x in range(1, 23)][::-1] + \
             ['-' + str(x) for x in range(1, 4)]
    if types == '5p':
        # 5p-dominant miRNA: restyle ("silence") the 3p arm regions
        hue_order = ['flanking-3', 'silent-pre', 'silent-seed', 'silent-post', 'loop']
        df_plot_3['type'] = df_plot_3['type'].apply(
            lambda x: 'silent-seed' if x == 'seed' else (
                'silent-pre' if x == 'pre-seed' else (
                    'silent-post' if x == 'post-seed' else x
                )
            )
        )
    ax = sns.barplot(x="from_start", y="pos", hue="type",
                     data=df_plot_3, dodge=False,
                     hue_order=hue_order,
                     palette=palette,
                     ax=b)
    for loc in ['right', 'top', 'left', 'bottom']:
        ax.spines[loc].set_visible(False)
    ax.set_xlabel('')
    ax.set_ylabel('')
    plt.grid(b=True, which='major', axis='y', color='lightgrey',
             linestyle='-', linewidth=0.75, zorder=1, alpha=0.5)
    ax.set_xticklabels(labels)
    if max_value > 5:
        plt.yticks(np.arange(0, max_value + 1, np.floor(max_value / 3)))
        plot_limit = max_value + 2
    else:
        plt.yticks(np.arange(0, max_value + 1, 1))
        plot_limit = max_value + 1
    ax.get_legend().remove()
    # inverted limits: bars grow downwards, mirroring the 5p panel
    ax.set_ylim([plot_limit, 0])
    ax.xaxis.tick_top()
    for tick in ax.xaxis.get_major_ticks():
        tick.label2.set_fontsize(30)  # label2 = the top-side tick label
    for tick in ax.yaxis.get_major_ticks():
        tick.label.set_fontsize(30)
    for label in ax.get_xticklabels():
        if label.get_text() not in ['+25', '+20', '+15', '+10', '+5', '+1', '1', '5', '10', '15', '20',
                                    '-5']:
            label.set_visible(False)
    new_width = 0.35
    for patch in ax.patches:
        current_width = patch.get_width()
        diff = current_width - new_width
        # we change the bar width
        patch.set_width(new_width)
        # we recenter the bar
        patch.set_x(patch.get_x() + diff * .5)
    plt.setp(ax.patches, linewidth=0)
    plt.savefig(output_name, format='svg', dpi=300, transparent=True, bbox_inches='tight')
    # aggressive cleanup: this runs in a long per-miRNA loop
    plt.cla()
    plt.clf()
    plt.close("all")
    plt.close(fig)
    gc.collect()
def prepare_figures_per_mirna(output_folder):
    """Draw one mutation-distribution figure per precursor miRNA.

    Reads 'all_mutations_with_n_hgvs.csv' from ``output_folder`` and writes
    one SVG per precursor into ``output_folder``/plots/miRNAs.
    """
    # exist_ok avoids the check-then-create race of the original pattern
    os.makedirs(output_folder + '/plots', exist_ok=True)
    os.makedirs(output_folder + '/plots/miRNAs', exist_ok=True)
    df_temp = pd.read_csv(output_folder + '/all_mutations_with_n_hgvs.csv')
    # df_temp = df_temp[df_temp['mutation_type'] == 'subst']
    # .unique() already de-duplicates; the former list(set(...)) wrapper also
    # made the iteration (and therefore log/progress) order nondeterministic.
    for mirna in df_temp['pre_name'].unique():
        df_temp2 = df_temp[df_temp['pre_name'] == mirna].copy()
        # assumes every row of one precursor carries the same 'balance'
        # label — matches the original's unique()[0] — TODO confirm upstream
        type_of_mirna = df_temp2['balance'].iloc[0]
        create_plot_per_mirna(df_temp2,
                              output_folder + '/plots/miRNAs/plot_miRNA_{}.svg'.format(mirna),
                              df_temp2.shape[0], types=type_of_mirna,
                              title1=mirna, title='')
        # keep memory bounded over a long run of large figures
        del df_temp2
        gc.collect()
@click.command()
@click.argument('output_folder')
def main(output_folder):
    """Generate all mutation-distribution plots for OUTPUT_FOLDER.

    OUTPUT_FOLDER must contain 'all_mutations_with_n_hgvs.csv'; figures are
    written under OUTPUT_FOLDER/plots (aggregate) and
    OUTPUT_FOLDER/plots/miRNAs (one per precursor).
    """
    prepare_figure(output_folder)
    prepare_figures_per_mirna(output_folder)


if __name__ == "__main__":
    main()
| 39.224436
| 118
| 0.502709
| 3,687
| 29,536
| 3.852183
| 0.082181
| 0.037386
| 0.013025
| 0.017039
| 0.883335
| 0.864888
| 0.845596
| 0.810955
| 0.805956
| 0.796803
| 0
| 0.045601
| 0.328074
| 29,536
| 752
| 119
| 39.276596
| 0.670059
| 0.029219
| 0
| 0.712454
| 0
| 0
| 0.123957
| 0.008587
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012821
| false
| 0
| 0.018315
| 0
| 0.034799
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d8f12092b1d58a22c18f50fc871f93771ff80296
| 6,090
|
py
|
Python
|
scvelo/core/tests/test_arithmetic.py
|
WeilerP/scvelo
|
1805ab4a72d3f34496f0ef246500a159f619d3a2
|
[
"BSD-3-Clause"
] | 272
|
2018-08-21T08:59:11.000Z
|
2022-03-30T11:24:19.000Z
|
scvelo/core/tests/test_arithmetic.py
|
theislab/scvelo
|
1805ab4a72d3f34496f0ef246500a159f619d3a2
|
[
"BSD-3-Clause"
] | 570
|
2018-08-21T14:04:03.000Z
|
2022-03-30T08:48:04.000Z
|
scvelo/core/tests/test_arithmetic.py
|
WeilerP/scvelo
|
1805ab4a72d3f34496f0ef246500a159f619d3a2
|
[
"BSD-3-Clause"
] | 105
|
2018-09-04T14:08:58.000Z
|
2022-03-17T16:20:14.000Z
|
from typing import List
from hypothesis import given
from hypothesis import strategies as st
from hypothesis.extra.numpy import arrays
import numpy as np
from numpy import ndarray
from numpy.testing import assert_almost_equal, assert_array_equal
from scvelo.core import clipped_log, invert, prod_sum, sum
class TestClippedLog:
    """Property-based tests for ``scvelo.core.clipped_log``.

    The flat and 2D cases must satisfy exactly the same bounds, so the
    previously duplicated 15-line assertion blocks are shared in a helper.
    """

    @staticmethod
    def _check_clipped_log(a: ndarray, bounds: List[float], eps: float):
        """Shared assertions: output is log of `a` clipped to [lb+eps, ub-eps]."""
        lb = min(bounds)
        ub = max(bounds) + 2 * eps
        a_logged = clipped_log(a, lb=lb, ub=ub, eps=eps)
        assert a_logged.shape == a.shape
        if (a <= lb).any():
            # entries at/below the lower bound map onto log(lb + eps)
            assert_almost_equal(np.abs(a_logged - np.log(lb + eps)).min(), 0)
        else:
            assert (a_logged >= np.log(lb + eps)).all()
        if (a >= ub).any():
            # entries at/above the upper bound map onto log(ub - eps)
            assert_almost_equal(np.abs(a_logged - np.log(ub - eps)).min(), 0)
        else:
            assert (a_logged <= np.log(ub - eps)).all()

    @given(
        a=arrays(
            float,
            shape=st.integers(min_value=1, max_value=100),
            elements=st.floats(
                min_value=-1e3, max_value=1e3, allow_infinity=False, allow_nan=False
            ),
        ),
        bounds=st.lists(
            st.floats(
                min_value=0, max_value=100, allow_infinity=False, allow_nan=False
            ),
            min_size=2,
            max_size=2,
            unique=True,
        ),
        eps=st.floats(
            min_value=1e-6, max_value=1, allow_infinity=False, allow_nan=False
        ),
    )
    def test_flat_arrays(self, a: ndarray, bounds: List[float], eps: float):
        self._check_clipped_log(a, bounds, eps)

    @given(
        a=arrays(
            float,
            shape=st.tuples(
                st.integers(min_value=1, max_value=100),
                st.integers(min_value=1, max_value=100),
            ),
            elements=st.floats(
                min_value=-1e3, max_value=1e3, allow_infinity=False, allow_nan=False
            ),
        ),
        bounds=st.lists(
            st.floats(
                min_value=0, max_value=100, allow_infinity=False, allow_nan=False
            ),
            min_size=2,
            max_size=2,
            unique=True,
        ),
        eps=st.floats(
            min_value=1e-6, max_value=1, allow_infinity=False, allow_nan=False
        ),
    )
    def test_2d_arrays(self, a: ndarray, bounds: List[float], eps: float):
        self._check_clipped_log(a, bounds, eps)
class TestInvert:
    """Property-based tests for ``scvelo.core.invert``.

    Non-zero entries must become their reciprocal, zeros must become NaN.
    The flat and 2D cases share the same contract, hence one helper.
    """

    @staticmethod
    def _check_inverted(a: ndarray):
        """Shared assertions for the flat and 2D test cases."""
        a_inv = invert(a)
        nonzero = a != 0
        if a[nonzero].size == 0:
            assert a_inv[nonzero].size == 0
        else:
            assert_array_equal(a_inv[nonzero], 1 / a[nonzero])
        if 0 in a:
            # zero has no finite reciprocal: must be NaN
            assert np.isnan(a_inv[a == 0]).all()
        else:
            assert set(a_inv[a == 0]) == set()

    @given(
        a=arrays(
            float,
            shape=st.integers(min_value=1, max_value=100),
            elements=st.floats(max_value=1e3, allow_infinity=False, allow_nan=False),
        )
    )
    def test_flat_arrays(self, a: ndarray):
        self._check_inverted(a)

    @given(
        a=arrays(
            float,
            shape=st.tuples(
                st.integers(min_value=1, max_value=100),
                st.integers(min_value=1, max_value=100),
            ),
            elements=st.floats(max_value=1e3, allow_infinity=False, allow_nan=False),
        )
    )
    def test_2d_arrays(self, a: ndarray):
        self._check_inverted(a)
# TODO: Extend test to generate sparse inputs as well
# TODO: Make test to generate two different arrays a1, a2
# TODO: Check why tests fail with assert_almost_equal
class TestProdSum:
    """Check ``prod_sum(a, a, axis)`` against the NumPy reference ``(a * a).sum``."""

    @given(
        a=arrays(
            float,
            shape=st.integers(min_value=1, max_value=100),
            elements=st.floats(max_value=1e3, allow_infinity=False, allow_nan=False),
        ),
        axis=st.integers(min_value=0, max_value=1),
    )
    def test_flat_array(self, a: ndarray, axis: int):
        # for 1D input the reference collapses along axis 0 whatever `axis` is
        reference = (a * a).sum(axis=0)
        assert np.allclose(reference, prod_sum(a, a, axis=axis))

    @given(
        a=arrays(
            float,
            shape=st.tuples(
                st.integers(min_value=1, max_value=100),
                st.integers(min_value=1, max_value=100),
            ),
            elements=st.floats(max_value=1e3, allow_infinity=False, allow_nan=False),
        ),
        axis=st.integers(min_value=0, max_value=1),
    )
    def test_2d_array(self, a: ndarray, axis: int):
        reference = (a * a).sum(axis=axis)
        assert np.allclose(reference, prod_sum(a, a, axis=axis))
# TODO: Extend test to generate sparse inputs as well
class TestSum:
    """Tests for ``scvelo.core.sum`` (shadows the builtin) against ``ndarray.sum``."""

    @given(
        a=arrays(
            float,
            shape=st.integers(min_value=1, max_value=100),
            elements=st.floats(max_value=1e3, allow_infinity=False, allow_nan=False),
        ),
    )
    def test_flat_arrays(self, a: ndarray):
        """1D input summed along axis 0 must match NumPy exactly."""
        a_summed = sum(a=a, axis=0)
        assert_array_equal(a_summed, a.sum(axis=0))

    @given(
        a=arrays(
            float,
            shape=st.tuples(
                st.integers(min_value=1, max_value=100),
                st.integers(min_value=1, max_value=100),
            ),
            elements=st.floats(max_value=1e3, allow_infinity=False, allow_nan=False),
        ),
        axis=st.integers(min_value=0, max_value=1),
    )
    def test_2d_arrays(self, a: ndarray, axis: int):
        """2D input summed along either axis must match NumPy exactly."""
        a_summed = sum(a=a, axis=axis)
        # NOTE(review): the shape strategy above always yields 2D arrays, so
        # this ndim==1 fallback looks unreachable here — confirm whether a
        # 1D collapse case was intended to be generated.
        if a.ndim == 1:
            axis = 0
        assert_array_equal(a_summed, a.sum(axis=axis))
| 30.757576
| 85
| 0.545156
| 838
| 6,090
| 3.78759
| 0.115752
| 0.068053
| 0.061437
| 0.085066
| 0.86673
| 0.863264
| 0.844045
| 0.838374
| 0.838374
| 0.811909
| 0
| 0.031494
| 0.327422
| 6,090
| 197
| 86
| 30.913706
| 0.743408
| 0.034647
| 0
| 0.784431
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005076
| 0.137725
| 1
| 0.047904
| false
| 0
| 0.047904
| 0
| 0.11976
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2b171d110e18d90880f80a77c7b0a728a6c26712
| 3,636
|
py
|
Python
|
ivy/functional/ivy/array_api/utility_functions.py
|
sert121/ivy
|
286f86e487b0c83d46a3ef8d30aa96316337db32
|
[
"Apache-2.0"
] | 1
|
2022-03-24T20:09:20.000Z
|
2022-03-24T20:09:20.000Z
|
ivy/functional/ivy/array_api/utility_functions.py
|
sert121/ivy
|
286f86e487b0c83d46a3ef8d30aa96316337db32
|
[
"Apache-2.0"
] | null | null | null |
ivy/functional/ivy/array_api/utility_functions.py
|
sert121/ivy
|
286f86e487b0c83d46a3ef8d30aa96316337db32
|
[
"Apache-2.0"
] | null | null | null |
# global
from typing import Union, Optional, Tuple, List
# local
import ivy
from ivy.framework_handler import current_framework as _cur_framework
# noinspection PyShadowingBuiltins
def all(x: Union[ivy.Array, ivy.NativeArray],
        axis: Optional[Union[int, Tuple[int, ...], List[int]]] = None,
        keepdims: bool = False)\
        -> ivy.Array:
    # NOTE: axis annotation fixed from Tuple[int] (a 1-tuple) to
    # Tuple[int, ...] — the docstring allows a tuple of any length.
    """
    Tests whether all input array elements evaluate to True along a specified axis.
    :param x: input array.
    :param axis: axis or axes along which to perform a logical AND reduction. By default, a logical AND reduction must
    be performed over the entire array. If a tuple of integers, logical AND reductions must be performed over multiple
    axes. A valid axis must be an integer on the interval [-N, N), where N is the rank (number of dimensions) of x.
    If an axis is specified as a negative integer, the function must determine the axis along which to perform a
    reduction by counting backward from the last dimension (where -1 refers to the last dimension). If provided an
    invalid axis, the function must raise an exception. Default: None.
    :param keepdims: If True, the reduced axes (dimensions) must be included in the result as singleton dimensions,
    and, accordingly, the result must be compatible with the input array (see Broadcasting). Otherwise, if False,
    the reduced axes (dimensions) must not be included in the result. Default is False.
    :return: if a logical AND reduction was performed over the entire array, the returned array must be a
    zero-dimensional array containing the test result; otherwise, the returned array must be a
    non-zero-dimensional array containing the test results. The returned array must have a data type of bool.
    """
    # delegate to the currently selected backend framework
    return _cur_framework(x).all(x, axis, keepdims)
# noinspection PyShadowingBuiltins
def any(x: Union[ivy.Array, ivy.NativeArray],
        axis: Optional[Union[int, Tuple[int, ...], List[int]]] = None,
        keepdims: bool = False)\
        -> ivy.Array:
    # NOTE: axis annotation fixed from Tuple[int] (a 1-tuple) to
    # Tuple[int, ...] — the docstring allows a tuple of any length.
    """
    Tests whether any input array element evaluate to True along a specified axis.
    :param x: input array.
    :param axis: axis or axes along which to perform a logical OR reduction. By default, a logical OR reduction must be
    performed over the entire array. If a tuple of integers, logical OR reductions must be performed over
    multiple axes. A valid axis must be an integer on the interval [-N, N), where N is the rank (number of
    dimensions) of x. If an axis is specified as a negative integer, the function must determine the axis
    along which to perform a reduction by counting backward from the last dimension (where -1 refers to the
    last dimension). If provided an invalid axis, the function must raise an exception. Default: None.
    :param keepdims: If True, the reduced axes (dimensions) must be included in the result as singleton dimensions,
    and, accordingly, the result must be compatible with the input array (see Broadcasting). Otherwise,
    if False, the reduced axes (dimensions) must not be included in the result. Default is False.
    :return: if a logical OR reduction was performed over the entire array, the returned array must be a
    zero-dimensional array containing the test result; otherwise, the returned array must be a
    non-zero-dimensional array containing the test results. The returned array must have a data type of bool.
    """
    # delegate to the currently selected backend framework
    return _cur_framework(x).any(x, axis, keepdims)
| 63.789474
| 122
| 0.706821
| 533
| 3,636
| 4.806754
| 0.204503
| 0.032787
| 0.037471
| 0.046838
| 0.889149
| 0.868852
| 0.868852
| 0.868852
| 0.868852
| 0.868852
| 0
| 0.000722
| 0.237624
| 3,636
| 56
| 123
| 64.928571
| 0.923521
| 0.80088
| 0
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0
| 0.230769
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
2b4dd386cfee73b3c69f545c796d28cb1d986830
| 44,902
|
py
|
Python
|
keras_segmentation/train.py
|
mwaseema/image-segmentation-keras-implementation
|
e137b55c3a19787309f086744e3f7ed1b4df4520
|
[
"MIT"
] | 1
|
2021-12-09T10:33:18.000Z
|
2021-12-09T10:33:18.000Z
|
keras_segmentation/train.py
|
mwaseema/image-segmentation-keras-implementation
|
e137b55c3a19787309f086744e3f7ed1b4df4520
|
[
"MIT"
] | null | null | null |
keras_segmentation/train.py
|
mwaseema/image-segmentation-keras-implementation
|
e137b55c3a19787309f086744e3f7ed1b4df4520
|
[
"MIT"
] | 1
|
2022-02-11T18:59:43.000Z
|
2022-02-11T18:59:43.000Z
|
import json
import os
import six
from keras import optimizers
from keras.callbacks import ReduceLROnPlateau
from . import custom_losses
from .custom_losses import smooth_l1_loss
from .data_utils.bounding_box_based_network_utils import bounding_box_based_network_loss_gpu
from .data_utils.bounding_box_iou_based_network_utils import bounding_box_iou_based_network_loss, \
bounding_box_iou_based_network_metric
from .data_utils.data_loader import image_segmentation_generator, IoU_network_image_segmentation_generator, \
verify_segmentation_dataset, two_stream_verify_segmentation_dataset, two_stream_image_segmentation_generator, \
image_segmentation_generator_i3d, image_segmentation_generator_bounding_box_based_network, \
image_segmentation_generator_bounding_box_iou_based_network, image_segmentation_generator_with_weighted_output, \
image_segmentation_generator_i3d_inception, image_segmentation_temporal_generator_with_weighted_output
from .data_utils.iou_utils import iou_metric_wrapper
from .models import model_from_name
def find_latest_checkpoint(checkpoints_path):
    """Return the path of the newest checkpoint file, or None if there is none.

    Checkpoints are saved as '<checkpoints_path>.<epoch>' with contiguous
    epoch numbers starting at 0; the scan stops at the first missing epoch.
    """
    epoch = 0
    latest = None
    while os.path.isfile(checkpoints_path + "." + str(epoch)):
        latest = checkpoints_path + "." + str(epoch)
        epoch += 1
    return latest
def replace_previous_checkpoint_with_empty_file(checkpoint_path, epoch_number):
    """Truncate the previous epoch's checkpoint to free disk space.

    The file is removed and replaced by an empty placeholder so that
    `find_latest_checkpoint` still sees a contiguous epoch sequence.

    :param checkpoint_path: base path of the checkpoint files.
    :param epoch_number: current epoch; epoch_number - 1 is truncated.
    """
    if epoch_number > 0:
        previous_check_point_path = f"{checkpoint_path}.{epoch_number - 1}"
        if os.path.exists(previous_check_point_path):
            os.remove(previous_check_point_path)
            # context manager instead of bare open()/close(): the handle is
            # released even if file creation raises mid-way
            with open(previous_check_point_path, mode='w'):
                pass
def train(model,
          train_images,
          train_annotations,
          input_height=None,
          input_width=None,
          n_classes=None,
          verify_dataset=True,
          checkpoints_path=None,
          epochs=5,
          batch_size=2,
          validate=False,
          val_images=None,
          val_annotations=None,
          val_batch_size=2,
          auto_resume_checkpoint=False,
          load_weights=None,
          steps_per_epoch=512,
          optimizer_name='adam'
          ):
    """Train a segmentation model on an image/annotation folder pair.

    :param model: a model instance, or a model name key of `model_from_name`
        (then n_classes — and optionally input_height/input_width — are required).
    :param train_images: path to the training images.
    :param train_annotations: path to the training segmentation masks.
    :param verify_dataset: check image/annotation consistency before training.
    :param checkpoints_path: base path for per-epoch weight files and the
        '<path>_config.json' model description; None disables checkpointing.
    :param validate: if True, evaluate on val_images/val_annotations each epoch.
    :param auto_resume_checkpoint: resume from the newest checkpoint if present.
    :param load_weights: optional path of pretrained weights to load first.
    :param optimizer_name: optimizer passed to model.compile; None skips compiling.
    """
    if isinstance(model, six.string_types):  # user gave a model name instead of a model object
        # create the model from the name
        assert n_classes is not None, "Please provide the n_classes"
        if (input_height is not None) and (input_width is not None):
            model = model_from_name[model](n_classes, input_height=input_height, input_width=input_width)
        else:
            model = model_from_name[model](n_classes)

    # from here on, read the geometry from the (possibly freshly built) model
    n_classes = model.n_classes
    input_height = model.input_height
    input_width = model.input_width
    output_height = model.output_height
    output_width = model.output_width

    if validate:
        assert val_images is not None
        assert val_annotations is not None

    if optimizer_name is not None:
        # focal loss with an IoU term; alpha/gamma kept from the original setup
        model.compile(loss=[custom_losses.categorical_focal_loss_with_iou(alpha=0.50, gamma=1.25, model=model)],
                      optimizer=optimizer_name,
                      metrics=['accuracy'])

    if checkpoints_path is not None:
        # persist the model geometry next to the checkpoints so prediction
        # code can rebuild the network; `with` closes the handle reliably
        with open(checkpoints_path + "_config.json", "w") as config_file:
            config_file.write(json.dumps({
                "model_class": model.model_name,
                "n_classes": n_classes,
                "input_height": input_height,
                "input_width": input_width,
                "output_height": output_height,
                "output_width": output_width
            }))

    if (load_weights is not None) and len(load_weights) > 0:
        print("Loading weights from ", load_weights)
        model.load_weights(load_weights)

    if auto_resume_checkpoint and (checkpoints_path is not None):
        latest_checkpoint = find_latest_checkpoint(checkpoints_path)
        if latest_checkpoint is not None:
            print("Loading the weights from latest checkpoint ", latest_checkpoint)
            model.load_weights(latest_checkpoint)

    if verify_dataset:
        print("Verifying train dataset")
        verify_segmentation_dataset(train_images, train_annotations, n_classes)
        if validate:
            print("Verifying val dataset")
            verify_segmentation_dataset(val_images, val_annotations, n_classes)

    train_gen = image_segmentation_generator(train_images, train_annotations, batch_size, n_classes, input_height,
                                             input_width, output_height, output_width)
    val_gen = None
    if validate:
        val_gen = image_segmentation_generator(val_images, val_annotations, val_batch_size, n_classes, input_height,
                                               input_width, output_height, output_width)

    # single epoch loop (the original duplicated it for the validate case,
    # differing only in the fit_generator keyword arguments)
    for ep in range(epochs):
        print("Starting Epoch ", ep)
        if validate:
            model.fit_generator(train_gen, steps_per_epoch, validation_data=val_gen, validation_steps=200, epochs=1)
        else:
            model.fit_generator(train_gen, steps_per_epoch, epochs=1)
        if checkpoints_path is not None:
            model.save_weights(checkpoints_path + "." + str(ep))
            print("saved ", checkpoints_path + ".model." + str(ep))
        print("Finished Epoch", ep)
def train_i3d_inception(model,
                        train_features_folder,
                        train_annotations_folder,
                        input_height=None,
                        input_width=None,
                        n_classes=None,
                        verify_dataset=True,
                        checkpoints_path=None,
                        epochs=5,
                        batch_size=2,
                        validate=False,
                        val_images=None,
                        val_annotations=None,
                        val_batch_size=2,
                        auto_resume_checkpoint=False,
                        load_weights=None,
                        steps_per_epoch=512,
                        optimizer_name='adam'
                        ):
    """Train an I3D-Inception segmentation model on pre-extracted feature folders.

    `model` may be a model object or the registered name of a model (looked
    up in `model_from_name`).  After each epoch the weights are saved to
    `checkpoints_path + "." + str(epoch)` and the model configuration is
    written to `checkpoints_path + "_config.json"`.
    """
    if isinstance(model, six.string_types):
        # The caller passed a model name instead of a model object:
        # instantiate it from the registry.
        assert n_classes is not None, "Please provide the n_classes"
        if input_height is not None and input_width is not None:
            model = model_from_name[model](n_classes, input_height=input_height, input_width=input_width)
        else:
            model = model_from_name[model](n_classes)

    # Read the effective geometry back off the (possibly freshly built) model.
    n_classes = model.n_classes
    input_height = model.input_height
    input_width = model.input_width
    output_height = model.output_height
    output_width = model.output_width

    if validate:
        assert val_images is not None
        assert val_annotations is not None

    if optimizer_name is not None:
        # Focal loss with an IoU term; optimizer_name is passed straight to Keras.
        model.compile(loss=[custom_losses.categorical_focal_loss_with_iou(alpha=0.50, gamma=1.25, model=model)],
                      optimizer=optimizer_name,
                      metrics=['accuracy'])

    if checkpoints_path is not None:
        # Persist the configuration so checkpoints can be reloaded later.
        # Fixed: use a context manager instead of leaking the file handle.
        with open(checkpoints_path + "_config.json", "w") as config_file:
            config_file.write(json.dumps({
                "model_class": model.model_name,
                "n_classes": n_classes,
                "input_height": input_height,
                "input_width": input_width,
                "output_height": output_height,
                "output_width": output_width
            }))

    if load_weights is not None and len(load_weights) > 0:
        print("Loading weights from ", load_weights)
        model.load_weights(load_weights)

    if auto_resume_checkpoint and checkpoints_path is not None:
        latest_checkpoint = find_latest_checkpoint(checkpoints_path)
        if latest_checkpoint is not None:
            print("Loading the weights from latest checkpoint ", latest_checkpoint)
            model.load_weights(latest_checkpoint)

    if verify_dataset:
        print("Verifying train dataset")
        verify_segmentation_dataset(train_features_folder, train_annotations_folder, n_classes)
        if validate:
            print("Verifying val dataset")
            # Fixed: verify the *validation* data; previously the train
            # folders were verified a second time here.
            verify_segmentation_dataset(val_images, val_annotations, n_classes)

    train_gen = image_segmentation_generator_i3d_inception(train_features_folder, train_annotations_folder, batch_size,
                                                           n_classes, input_height, input_width, output_height,
                                                           output_width)
    val_gen = None
    if validate:
        val_gen = image_segmentation_generator_i3d_inception(val_images, val_annotations, val_batch_size, n_classes,
                                                             input_height, input_width, output_height, output_width)

    for ep in range(epochs):
        print("Starting Epoch ", ep)
        if validate:
            model.fit_generator(train_gen, steps_per_epoch, validation_data=val_gen, validation_steps=200, epochs=1)
        else:
            model.fit_generator(train_gen, steps_per_epoch, epochs=1)
        if checkpoints_path is not None:
            model.save_weights(checkpoints_path + "." + str(ep))
            # Fixed: report the file that was actually written (the old
            # message inserted a stray ".model" segment).
            print("saved ", checkpoints_path + "." + str(ep))
        print("Finished Epoch", ep)
def train_with_weighted_output(model,
                               train_images,
                               train_annotations,
                               input_height=None,
                               input_width=None,
                               n_classes=None,
                               verify_dataset=True,
                               checkpoints_path=None,
                               epochs=5,
                               batch_size=2,
                               validate=False,
                               val_images=None,
                               val_annotations=None,
                               val_batch_size=2,
                               auto_resume_checkpoint=False,
                               load_weights=None,
                               steps_per_epoch=512,
                               optimizer_name='adam'
                               ):
    """Train a two-headed segmentation model that also predicts a weighted output.

    The main segmentation head uses a focal+IoU loss and the secondary head
    uses smooth-L1.  After each epoch the weights are saved to
    `checkpoints_path + "." + str(epoch)`; the model configuration is written
    to `checkpoints_path + "_config.json"`.
    """
    if isinstance(model, six.string_types):
        # The caller passed a model name instead of a model object:
        # instantiate it from the registry.
        assert n_classes is not None, "Please provide the n_classes"
        if input_height is not None and input_width is not None:
            model = model_from_name[model](n_classes, input_height=input_height, input_width=input_width)
        else:
            model = model_from_name[model](n_classes)

    # Read the effective geometry back off the (possibly freshly built) model.
    n_classes = model.n_classes
    input_height = model.input_height
    input_width = model.input_width
    output_height = model.output_height
    output_width = model.output_width

    if validate:
        assert val_images is not None
        assert val_annotations is not None

    if optimizer_name is not None:
        # Per-output losses: focal+IoU for the segmentation head, smooth L1
        # for the auxiliary weighted-output head.
        model.compile(loss={
            "main_output_activation": custom_losses.categorical_focal_loss_with_iou(alpha=0.50, gamma=1.25,
                                                                                    model=model),
            "second_output_activation": smooth_l1_loss,
        },
            optimizer=optimizer_name,
            metrics=['accuracy'])

    if checkpoints_path is not None:
        # Persist the configuration so checkpoints can be reloaded later.
        # Fixed: use a context manager instead of leaking the file handle.
        with open(checkpoints_path + "_config.json", "w") as config_file:
            config_file.write(json.dumps({
                "model_class": model.model_name,
                "n_classes": n_classes,
                "input_height": input_height,
                "input_width": input_width,
                "output_height": output_height,
                "output_width": output_width
            }))

    if load_weights is not None and len(load_weights) > 0:
        print("Loading weights from ", load_weights)
        model.load_weights(load_weights)

    if auto_resume_checkpoint and checkpoints_path is not None:
        latest_checkpoint = find_latest_checkpoint(checkpoints_path)
        if latest_checkpoint is not None:
            print("Loading the weights from latest checkpoint ", latest_checkpoint)
            model.load_weights(latest_checkpoint)

    if verify_dataset:
        print("Verifying train dataset")
        verify_segmentation_dataset(train_images, train_annotations, n_classes)
        if validate:
            print("Verifying val dataset")
            verify_segmentation_dataset(val_images, val_annotations, n_classes)

    train_gen = image_segmentation_generator_with_weighted_output(train_images, train_annotations, batch_size,
                                                                  n_classes, input_height, input_width,
                                                                  output_height, output_width)
    val_gen = None
    if validate:
        val_gen = image_segmentation_generator_with_weighted_output(val_images, val_annotations, val_batch_size,
                                                                    n_classes, input_height, input_width,
                                                                    output_height, output_width)

    for ep in range(epochs):
        print("Starting Epoch ", ep)
        if validate:
            model.fit_generator(train_gen, steps_per_epoch, validation_data=val_gen, validation_steps=200, epochs=1)
        else:
            model.fit_generator(train_gen, steps_per_epoch, epochs=1)
        if checkpoints_path is not None:
            model.save_weights(checkpoints_path + "." + str(ep))
            # Fixed: report the file that was actually written (the old
            # message inserted a stray ".model" segment).
            print("saved ", checkpoints_path + "." + str(ep))
        print("Finished Epoch", ep)
def train_temporal_with_weighted_output(model,
                                        train_images,
                                        train_annotations,
                                        input_height=None,
                                        input_width=None,
                                        n_classes=None,
                                        verify_dataset=True,
                                        checkpoints_path=None,
                                        epochs=5,
                                        batch_size=2,
                                        validate=False,
                                        val_images=None,
                                        val_annotations=None,
                                        val_batch_size=2,
                                        auto_resume_checkpoint=False,
                                        load_weights=None,
                                        steps_per_epoch=512,
                                        optimizer_name='adam'
                                        ):
    """Train a temporal two-headed segmentation model with a weighted output.

    Same training loop as `train_with_weighted_output`, but batches come from
    the temporal generator.  After each epoch the weights are saved to
    `checkpoints_path + "." + str(epoch)`; the model configuration is written
    to `checkpoints_path + "_config.json"`.
    """
    if isinstance(model, six.string_types):
        # The caller passed a model name instead of a model object:
        # instantiate it from the registry.
        assert n_classes is not None, "Please provide the n_classes"
        if input_height is not None and input_width is not None:
            model = model_from_name[model](n_classes, input_height=input_height, input_width=input_width)
        else:
            model = model_from_name[model](n_classes)

    # Read the effective geometry back off the (possibly freshly built) model.
    n_classes = model.n_classes
    input_height = model.input_height
    input_width = model.input_width
    output_height = model.output_height
    output_width = model.output_width

    if validate:
        assert val_images is not None
        assert val_annotations is not None

    if optimizer_name is not None:
        # Per-output losses: focal+IoU for the segmentation head, smooth L1
        # for the auxiliary weighted-output head.
        model.compile(loss={
            "main_output_activation": custom_losses.categorical_focal_loss_with_iou(alpha=0.50, gamma=1.25,
                                                                                    model=model),
            "second_output_activation": smooth_l1_loss,
        },
            optimizer=optimizer_name,
            metrics=['accuracy'])

    if checkpoints_path is not None:
        # Persist the configuration so checkpoints can be reloaded later.
        # Fixed: use a context manager instead of leaking the file handle.
        with open(checkpoints_path + "_config.json", "w") as config_file:
            config_file.write(json.dumps({
                "model_class": model.model_name,
                "n_classes": n_classes,
                "input_height": input_height,
                "input_width": input_width,
                "output_height": output_height,
                "output_width": output_width
            }))

    if load_weights is not None and len(load_weights) > 0:
        print("Loading weights from ", load_weights)
        model.load_weights(load_weights)

    if auto_resume_checkpoint and checkpoints_path is not None:
        latest_checkpoint = find_latest_checkpoint(checkpoints_path)
        if latest_checkpoint is not None:
            print("Loading the weights from latest checkpoint ", latest_checkpoint)
            model.load_weights(latest_checkpoint)

    if verify_dataset:
        print("Verifying train dataset")
        verify_segmentation_dataset(train_images, train_annotations, n_classes)
        if validate:
            print("Verifying val dataset")
            verify_segmentation_dataset(val_images, val_annotations, n_classes)

    train_gen = image_segmentation_temporal_generator_with_weighted_output(train_images, train_annotations, batch_size,
                                                                           n_classes, input_height, input_width,
                                                                           output_height, output_width)
    val_gen = None
    if validate:
        val_gen = image_segmentation_temporal_generator_with_weighted_output(val_images, val_annotations,
                                                                             val_batch_size, n_classes, input_height,
                                                                             input_width, output_height, output_width)

    for ep in range(epochs):
        print("Starting Epoch ", ep)
        if validate:
            model.fit_generator(train_gen, steps_per_epoch, validation_data=val_gen, validation_steps=200, epochs=1)
        else:
            model.fit_generator(train_gen, steps_per_epoch, epochs=1)
        if checkpoints_path is not None:
            model.save_weights(checkpoints_path + "." + str(ep))
            # Fixed: report the file that was actually written (the old
            # message inserted a stray ".model" segment).
            print("saved ", checkpoints_path + "." + str(ep))
        print("Finished Epoch", ep)
def train_bounding_box_based_network(model, train_images, train_annotations, input_height=None, input_width=None,
                                     n_classes=None, verify_dataset=True, checkpoints_path=None, epochs=5, batch_size=2,
                                     validate=False, val_images=None, val_annotations=None, val_batch_size=2,
                                     auto_resume_checkpoint=False, load_weights=None, steps_per_epoch=512,
                                     optimizer_name='adam', optimizer_lr=0.001, optimizer_decay=0.001):
    """Train a bounding-box based network with the GPU box loss.

    The output geometry is taken to equal the input geometry (the model has no
    separate output_height/output_width here).  After each epoch the weights
    are saved to `checkpoints_path + "." + str(epoch)`; the model
    configuration is written to `checkpoints_path + "_config.json"`.
    """
    if isinstance(model, six.string_types):
        # The caller passed a model name instead of a model object:
        # instantiate it from the registry.
        assert n_classes is not None, "Please provide the n_classes"
        if input_height is not None and input_width is not None:
            model = model_from_name[model](n_classes, input_height=input_height, input_width=input_width)
        else:
            model = model_from_name[model](n_classes)

    n_classes = model.n_classes
    input_height = model.input_height
    input_width = model.input_width
    # This network predicts at input resolution.
    output_height = input_height
    output_width = input_width

    if validate:
        assert val_images is not None
        assert val_annotations is not None

    if optimizer_name is not None:
        # NOTE(review): optimizer_name only acts as an on/off switch here; the
        # actual optimizer is always Adam with the given lr/decay.
        adam = optimizers.Adam(lr=optimizer_lr, decay=optimizer_decay)
        model.compile(loss=bounding_box_based_network_loss_gpu,
                      optimizer=adam,
                      metrics=['accuracy'])

    if checkpoints_path is not None:
        # Persist the configuration so checkpoints can be reloaded later.
        # Fixed: use a context manager instead of leaking the file handle.
        with open(checkpoints_path + "_config.json", "w") as config_file:
            config_file.write(json.dumps({
                "model_class": model.model_name,
                "n_classes": n_classes,
                "input_height": input_height,
                "input_width": input_width,
                "output_height": output_height,
                "output_width": output_width
            }))

    if load_weights is not None and len(load_weights) > 0:
        print("Loading weights from ", load_weights)
        model.load_weights(load_weights)

    if auto_resume_checkpoint and checkpoints_path is not None:
        latest_checkpoint = find_latest_checkpoint(checkpoints_path)
        if latest_checkpoint is not None:
            print("Loading the weights from latest checkpoint ", latest_checkpoint)
            model.load_weights(latest_checkpoint)

    if verify_dataset:
        print("Verifying train dataset")
        verify_segmentation_dataset(train_images, train_annotations, n_classes)
        if validate:
            print("Verifying val dataset")
            verify_segmentation_dataset(val_images, val_annotations, n_classes)

    train_gen = image_segmentation_generator_bounding_box_based_network(train_images, train_annotations, batch_size,
                                                                        n_classes, input_height, input_width,
                                                                        output_height, output_width)
    val_gen = None
    if validate:
        val_gen = image_segmentation_generator_bounding_box_based_network(val_images, val_annotations, val_batch_size,
                                                                          n_classes, input_height, input_width,
                                                                          output_height, output_width)

    for ep in range(epochs):
        print("Starting Epoch ", ep)
        if validate:
            model.fit_generator(train_gen, steps_per_epoch, validation_data=val_gen, validation_steps=200, epochs=1)
        else:
            model.fit_generator(train_gen, steps_per_epoch, epochs=1)
        if checkpoints_path is not None:
            model.save_weights(checkpoints_path + "." + str(ep))
            # Fixed: report the file that was actually written (the old
            # message inserted a stray ".model" segment).
            print("saved ", checkpoints_path + "." + str(ep))
        print("Finished Epoch", ep)
def train_bounding_box_iou_based_network(model, train_images, train_annotations, input_height=None, input_width=None,
                                         n_classes=None, verify_dataset=True, checkpoints_path=None, epochs=5,
                                         batch_size=2,
                                         validate=False, val_images=None, val_annotations=None, val_batch_size=2,
                                         auto_resume_checkpoint=False, load_weights=None, steps_per_epoch=512,
                                         optimizer_name='adam', optimizer_lr=0.001, optimizer_decay=0.001):
    """Train a bounding-box network with the IoU-based loss and metric.

    The output geometry is taken to equal the input geometry.  After each
    epoch the weights are saved to `checkpoints_path + "." + str(epoch)`; the
    model configuration is written to `checkpoints_path + "_config.json"`.
    """
    if isinstance(model, six.string_types):
        # The caller passed a model name instead of a model object:
        # instantiate it from the registry.
        assert n_classes is not None, "Please provide the n_classes"
        if input_height is not None and input_width is not None:
            model = model_from_name[model](n_classes, input_height=input_height, input_width=input_width)
        else:
            model = model_from_name[model](n_classes)

    n_classes = model.n_classes
    input_height = model.input_height
    input_width = model.input_width
    # This network predicts at input resolution.
    output_height = input_height
    output_width = input_width

    if validate:
        assert val_images is not None
        assert val_annotations is not None

    if optimizer_name is not None:
        # NOTE(review): optimizer_name only acts as an on/off switch here; the
        # actual optimizer is always Adam with the given lr/decay.
        adam = optimizers.Adam(lr=optimizer_lr, decay=optimizer_decay)
        model.compile(loss=bounding_box_iou_based_network_loss,
                      optimizer=adam,
                      metrics=['accuracy', bounding_box_iou_based_network_metric])

    if checkpoints_path is not None:
        # Persist the configuration so checkpoints can be reloaded later.
        # Fixed: use a context manager instead of leaking the file handle.
        with open(checkpoints_path + "_config.json", "w") as config_file:
            config_file.write(json.dumps({
                "model_class": model.model_name,
                "n_classes": n_classes,
                "input_height": input_height,
                "input_width": input_width,
                "output_height": output_height,
                "output_width": output_width
            }))

    if load_weights is not None and len(load_weights) > 0:
        print("Loading weights from ", load_weights)
        model.load_weights(load_weights)

    if auto_resume_checkpoint and checkpoints_path is not None:
        latest_checkpoint = find_latest_checkpoint(checkpoints_path)
        if latest_checkpoint is not None:
            print("Loading the weights from latest checkpoint ", latest_checkpoint)
            model.load_weights(latest_checkpoint)

    if verify_dataset:
        print("Verifying train dataset")
        verify_segmentation_dataset(train_images, train_annotations, n_classes)
        if validate:
            print("Verifying val dataset")
            verify_segmentation_dataset(val_images, val_annotations, n_classes)

    train_gen = image_segmentation_generator_bounding_box_iou_based_network(train_images, train_annotations,
                                                                            batch_size, n_classes, input_height,
                                                                            input_width, output_height, output_width)
    val_gen = None
    if validate:
        val_gen = image_segmentation_generator_bounding_box_iou_based_network(val_images, val_annotations,
                                                                              val_batch_size, n_classes, input_height,
                                                                              input_width, output_height,
                                                                              output_width)

    for ep in range(epochs):
        print("Starting Epoch ", ep)
        if validate:
            model.fit_generator(train_gen, steps_per_epoch, validation_data=val_gen, validation_steps=200, epochs=1)
        else:
            model.fit_generator(train_gen, steps_per_epoch, epochs=1)
        if checkpoints_path is not None:
            model.save_weights(checkpoints_path + "." + str(ep))
            # Fixed: report the file that was actually written (the old
            # message inserted a stray ".model" segment).
            print("saved ", checkpoints_path + "." + str(ep))
        print("Finished Epoch", ep)
def train_IoU_network(model,
                      train_images,
                      train_annotations,
                      input_height=None,
                      input_width=None,
                      n_classes=None,
                      verify_dataset=True,
                      checkpoints_path=None,
                      epochs=5,
                      batch_size=2,
                      validate=False,
                      val_images=None,
                      val_annotations=None,
                      val_batch_size=2,
                      auto_resume_checkpoint=False,
                      load_weights=None,
                      steps_per_epoch=512,
                      optimizer_name='adam'
                      ):
    """Train an IoU-regression network with a smooth-L1 loss.

    The output geometry is not used by this network, so it is recorded as
    None in the config.  After each epoch the weights are saved to
    `checkpoints_path + "." + str(epoch)`; the model configuration is
    written to `checkpoints_path + "_config.json"`.
    """
    if isinstance(model, six.string_types):
        # The caller passed a model name instead of a model object:
        # instantiate it from the registry.
        assert n_classes is not None, "Please provide the n_classes"
        if input_height is not None and input_width is not None:
            model = model_from_name[model](n_classes, input_height=input_height, input_width=input_width)
        else:
            model = model_from_name[model](n_classes)

    n_classes = model.n_classes
    input_height = model.input_height
    input_width = model.input_width
    # This network produces a scalar-style output; no spatial output geometry.
    output_height = None
    output_width = None

    if validate:
        assert val_images is not None
        assert val_annotations is not None

    if optimizer_name is not None:
        model.compile(loss=custom_losses.smooth_l1_loss,
                      optimizer=optimizer_name,
                      metrics=['accuracy'])

    if checkpoints_path is not None:
        # Persist the configuration so checkpoints can be reloaded later.
        # Fixed: use a context manager instead of leaking the file handle.
        with open(checkpoints_path + "_config.json", "w") as config_file:
            config_file.write(json.dumps({
                "model_class": model.model_name,
                "n_classes": n_classes,
                "input_height": input_height,
                "input_width": input_width,
                "output_height": output_height,
                "output_width": output_width
            }))

    if load_weights is not None and len(load_weights) > 0:
        print("Loading weights from ", load_weights)
        model.load_weights(load_weights)

    if auto_resume_checkpoint and checkpoints_path is not None:
        latest_checkpoint = find_latest_checkpoint(checkpoints_path)
        if latest_checkpoint is not None:
            print("Loading the weights from latest checkpoint ", latest_checkpoint)
            model.load_weights(latest_checkpoint)

    if verify_dataset:
        print("Verifying train dataset")
        verify_segmentation_dataset(train_images, train_annotations, n_classes)
        if validate:
            print("Verifying val dataset")
            verify_segmentation_dataset(val_images, val_annotations, n_classes)

    train_gen = IoU_network_image_segmentation_generator(train_images, train_annotations, batch_size, n_classes,
                                                         input_height, input_width, output_height, output_width)
    val_gen = None
    if validate:
        val_gen = IoU_network_image_segmentation_generator(val_images, val_annotations, val_batch_size, n_classes,
                                                           input_height, input_width, output_height, output_width)

    for ep in range(epochs):
        print("Starting Epoch ", ep)
        if validate:
            model.fit_generator(train_gen, steps_per_epoch, validation_data=val_gen, validation_steps=200, epochs=1)
        else:
            model.fit_generator(train_gen, steps_per_epoch, epochs=1)
        if checkpoints_path is not None:
            model.save_weights(checkpoints_path + "." + str(ep))
            # Fixed: report the file that was actually written (the old
            # message inserted a stray ".model" segment).
            print("saved ", checkpoints_path + "." + str(ep))
        print("Finished Epoch", ep)
def train_i3d(model,
              train_images,
              train_annotations,
              input_height=None,
              input_width=None,
              n_classes=None,
              verify_dataset=True,
              checkpoints_path=None,
              epochs=5,
              batch_size=2,
              validate=False,
              val_images=None,
              val_annotations=None,
              val_batch_size=2,
              auto_resume_checkpoint=False,
              load_weights=None,
              steps_per_epoch=512,
              optimizer_name='adam',
              lr_custom=0.001,
              lr_decay=0.0
              ):
    """Train a two-headed I3D segmentation model.

    Uses Adam with the given learning rate / decay; the main head is trained
    with a focal+IoU loss and the secondary head with smooth L1.  After each
    epoch the weights are saved to `checkpoints_path + "." + str(epoch)`; the
    model configuration is written to `checkpoints_path + "_config.json"`.
    """
    if isinstance(model, six.string_types):
        # The caller passed a model name instead of a model object:
        # instantiate it from the registry.
        assert n_classes is not None, "Please provide the n_classes"
        if input_height is not None and input_width is not None:
            model = model_from_name[model](n_classes, input_height=input_height, input_width=input_width)
        else:
            model = model_from_name[model](n_classes)

    # Read the effective geometry back off the (possibly freshly built) model.
    n_classes = model.n_classes
    input_height = model.input_height
    input_width = model.input_width
    output_height = model.output_height
    output_width = model.output_width

    if validate:
        assert val_images is not None
        assert val_annotations is not None

    if optimizer_name is not None:
        # NOTE(review): optimizer_name only acts as an on/off switch here; the
        # actual optimizer is always Adam with lr_custom / lr_decay.
        adam = optimizers.Adam(lr=lr_custom, beta_1=0.9, beta_2=0.999, decay=lr_decay)
        model.compile(loss={
            "main_output_activation": custom_losses.categorical_focal_loss_with_iou(alpha=0.50, gamma=1.25,
                                                                                    model=model),
            "second_output_activation": smooth_l1_loss,
        },
            optimizer=adam,
            metrics=['accuracy'])

    if checkpoints_path is not None:
        # Persist the configuration so checkpoints can be reloaded later.
        # Fixed: use a context manager instead of leaking the file handle.
        with open(checkpoints_path + "_config.json", "w") as config_file:
            config_file.write(json.dumps({
                "model_class": model.model_name,
                "n_classes": n_classes,
                "input_height": input_height,
                "input_width": input_width,
                "output_height": output_height,
                "output_width": output_width
            }))

    if load_weights is not None and len(load_weights) > 0:
        print("Loading weights from ", load_weights)
        model.load_weights(load_weights)

    if auto_resume_checkpoint and checkpoints_path is not None:
        latest_checkpoint = find_latest_checkpoint(checkpoints_path)
        if latest_checkpoint is not None:
            print("Loading the weights from latest checkpoint ", latest_checkpoint)
            model.load_weights(latest_checkpoint)

    if verify_dataset:
        print("Verifying train dataset")
        verify_segmentation_dataset(train_images, train_annotations, n_classes)
        if validate:
            print("Verifying val dataset")
            verify_segmentation_dataset(val_images, val_annotations, n_classes)

    train_gen = image_segmentation_generator_i3d(train_images, train_annotations, batch_size, n_classes, input_height,
                                                 input_width, output_height, output_width)
    val_gen = None
    if validate:
        val_gen = image_segmentation_generator_i3d(val_images, val_annotations, val_batch_size, n_classes,
                                                   input_height, input_width, output_height, output_width)

    for ep in range(epochs):
        print("Starting Epoch ", ep)
        if validate:
            model.fit_generator(train_gen, steps_per_epoch, validation_data=val_gen, validation_steps=200, epochs=1)
        else:
            model.fit_generator(train_gen, steps_per_epoch, epochs=1)
        if checkpoints_path is not None:
            model.save_weights(checkpoints_path + "." + str(ep))
            # Fixed: report the file that was actually written (the old
            # message inserted a stray ".model" segment).
            print("saved ", checkpoints_path + "." + str(ep))
        print("Finished Epoch", ep)
def train_two_stream(model,
                     train_images,
                     train_flows,
                     train_annotations,
                     input_height=None,
                     input_width=None,
                     n_classes=None,
                     verify_dataset=True,
                     checkpoints_path=None,
                     epochs=5,
                     batch_size=2,
                     validate=False,
                     val_images=None,
                     val_flows=None,
                     val_annotations=None,
                     val_batch_size=2,
                     auto_resume_checkpoint=False,
                     load_weights=None,
                     steps_per_epoch=512,
                     optimizer_name='adam'
                     ):
    """Train a two-stream (RGB + optical flow) segmentation model.

    After each epoch the weights are saved to
    `checkpoints_path + "." + str(epoch)`; the model configuration is written
    to `checkpoints_path + "_config.json"`.
    """
    if isinstance(model, six.string_types):
        # The caller passed a model name instead of a model object:
        # instantiate it from the registry.
        assert n_classes is not None, "Please provide the n_classes"
        if input_height is not None and input_width is not None:
            model = model_from_name[model](n_classes, input_height=input_height, input_width=input_width)
        else:
            model = model_from_name[model](n_classes)

    # Read the effective geometry back off the (possibly freshly built) model.
    n_classes = model.n_classes
    input_height = model.input_height
    input_width = model.input_width
    output_height = model.output_height
    output_width = model.output_width

    if validate:
        assert val_images is not None
        assert val_flows is not None
        assert val_annotations is not None

    if optimizer_name is not None:
        model.compile(loss='categorical_crossentropy',
                      optimizer=optimizer_name,
                      metrics=['accuracy'])

    if checkpoints_path is not None:
        # Persist the configuration so checkpoints can be reloaded later.
        # Fixed: use a context manager instead of leaking the file handle.
        with open(checkpoints_path + "_config.json", "w") as config_file:
            config_file.write(json.dumps({
                "model_class": model.model_name,
                "n_classes": n_classes,
                "input_height": input_height,
                "input_width": input_width,
                "output_height": output_height,
                "output_width": output_width
            }))

    if load_weights is not None and len(load_weights) > 0:
        print("Loading weights from ", load_weights)
        model.load_weights(load_weights)

    if auto_resume_checkpoint and checkpoints_path is not None:
        latest_checkpoint = find_latest_checkpoint(checkpoints_path)
        if latest_checkpoint is not None:
            print("Loading the weights from latest checkpoint ", latest_checkpoint)
            model.load_weights(latest_checkpoint)

    if verify_dataset:
        print("Verifying train dataset")
        two_stream_verify_segmentation_dataset(train_images, train_flows, train_annotations, n_classes)
        if validate:
            print("Verifying val dataset")
            # Fixed: verify the *validation* flows (previously train_flows
            # was passed here by mistake).
            two_stream_verify_segmentation_dataset(val_images, val_flows, val_annotations, n_classes)

    train_gen = two_stream_image_segmentation_generator(train_images, train_flows, train_annotations, batch_size,
                                                        n_classes, input_height, input_width, output_height,
                                                        output_width)
    val_gen = None
    if validate:
        val_gen = two_stream_image_segmentation_generator(val_images, val_flows, val_annotations, val_batch_size,
                                                          n_classes, input_height, input_width, output_height,
                                                          output_width)

    for ep in range(epochs):
        print("Starting Epoch ", ep)
        if validate:
            model.fit_generator(train_gen, steps_per_epoch, validation_data=val_gen, validation_steps=200, epochs=1)
        else:
            model.fit_generator(train_gen, steps_per_epoch, epochs=1)
        if checkpoints_path is not None:
            model.save_weights(checkpoints_path + "." + str(ep))
            # Fixed: report the file that was actually written (the old
            # message inserted a stray ".model" segment).
            print("saved ", checkpoints_path + "." + str(ep))
        print("Finished Epoch", ep)
| 45.447368
| 143
| 0.60396
| 4,972
| 44,902
| 5.123492
| 0.033588
| 0.038
| 0.03957
| 0.033564
| 0.967732
| 0.956544
| 0.9293
| 0.925336
| 0.921096
| 0.92086
| 0
| 0.00722
| 0.321389
| 44,902
| 987
| 144
| 45.493414
| 0.828788
| 0.096098
| 0
| 0.891645
| 0
| 0
| 0.074991
| 0.004767
| 0
| 0
| 0
| 0
| 0.036554
| 1
| 0.01436
| false
| 0
| 0.015666
| 0
| 0.031332
| 0.117493
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2b5bc4936d4c5a1852062ffd2ca2dae245bcdb3d
| 18,978
|
py
|
Python
|
neural_network_lyapunov/examples/car/test/test_unicycle.py
|
hongkai-dai/neural-network-lyapunov-1
|
8843c13f69f7f39cbb939ab250413e76f61843f6
|
[
"MIT"
] | 58
|
2021-06-21T08:59:52.000Z
|
2022-03-31T14:35:23.000Z
|
neural_network_lyapunov/examples/car/test/test_unicycle.py
|
StanfordASL/neural-network-lyapunov
|
9e5db1c7f91b42df729026c9aa8575bc126f66b6
|
[
"MIT"
] | 8
|
2021-08-22T05:31:23.000Z
|
2022-03-29T03:47:07.000Z
|
neural_network_lyapunov/examples/car/test/test_unicycle.py
|
StanfordASL/neural-network-lyapunov
|
9e5db1c7f91b42df729026c9aa8575bc126f66b6
|
[
"MIT"
] | 11
|
2021-06-21T04:29:59.000Z
|
2022-03-30T05:54:43.000Z
|
import neural_network_lyapunov.examples.car.unicycle as unicycle
import neural_network_lyapunov.utils as utils
import neural_network_lyapunov.gurobi_torch_mip as gurobi_torch_mip
import unittest
import numpy as np
import torch
import scipy.integrate
import scipy.linalg
import gurobipy
class TestUnicycle(unittest.TestCase):
    """Tests for the continuous-time unicycle dynamics model."""

    def test_dynamics(self):
        plant = unicycle.Unicycle(torch.float64)
        # Test with a pytorch tensor.
        x = torch.tensor([2., 3., 0.5], dtype=torch.float64)
        u = torch.tensor([0.5, -0.2], dtype=torch.float64)
        xdot_torch = plant.dynamics(x, u)
        np.testing.assert_allclose(
            xdot_torch.detach().numpy(),
            np.array([u[0] * torch.cos(x[2]), u[0] * torch.sin(x[2]), u[1]]))
        # The numpy overload must agree with the torch one.
        xdot_np = plant.dynamics(x.detach().numpy(), u.detach().numpy())
        np.testing.assert_allclose(xdot_torch.detach().numpy(), xdot_np)

    def test_dynamics_gradient(self):
        plant = unicycle.Unicycle(torch.float64)

        def tester(x_val: np.ndarray, u_val: np.ndarray):
            A, B = plant.dynamics_gradient(x_val, u_val)
            A_torch, B_torch = plant.dynamics_gradient(torch.from_numpy(x_val),
                                                       torch.from_numpy(u_val))
            np.testing.assert_allclose(A, A_torch.detach().numpy())
            np.testing.assert_allclose(B, B_torch.detach().numpy())
            # Cross-check the analytic gradient against pytorch autograd.
            # (Fixed: this was a stray no-op string statement with a typo.)
            x_torch = torch.from_numpy(x_val)
            x_torch.requires_grad = True
            u_torch = torch.from_numpy(u_val)
            u_torch.requires_grad = True
            for i in range(3):
                # Clear gradients accumulated by the previous backward pass.
                if x_torch.grad is not None:
                    x_torch.grad.zero_()
                if u_torch.grad is not None:
                    u_torch.grad.zero_()
                xdot = plant.dynamics(x_torch, u_torch)
                xdot[i].backward()
                np.testing.assert_allclose(A_torch[i].detach().numpy(),
                                           x_torch.grad.detach().numpy())
                np.testing.assert_allclose(B_torch[i].detach().numpy(),
                                           u_torch.grad.detach().numpy())

        tester(np.array([0.5, 0.4, 0.2]), np.array([-0.3, 0.8]))
        tester(np.array([-0.5, 0.7, -2.2]), np.array([-1.3, -.8]))
        tester(np.array([-2.5, 0.7, -1.5]), np.array([-1.9, -.8]))

    def test_next_pose(self):
        plant = unicycle.Unicycle(torch.float64)
        x = torch.tensor([2., 3., 0.5], dtype=torch.float64)
        u = torch.tensor([0.5, -0.2], dtype=torch.float64)
        x_next = plant.next_pose(x, u, 0.1)
        # Compare the model's one-step pose update against scipy's adaptive
        # ODE integration of the same dynamics over the same interval.
        result = scipy.integrate.solve_ivp(
            lambda t, x_val: plant.dynamics(x_val,
                                            u.detach().numpy()), [0, 0.1],
            x.detach().numpy())
        np.testing.assert_allclose(x_next, result.y[:, -1])
class TestUnicycleReLUModel(unittest.TestCase):
    """Tests for the ReLU-network discrete-time unicycle model.

    Two devices-under-test are built in setUp: one whose network input is
    (theta, vel) and one whose input also includes thetadot.
    """

    def setUp(self):
        self.dtype = torch.float64
        # Arbitrarily initialize the relu network. All the tests should pass
        # even if the network doesn't approximate the unicycle dynamics.
        dynamics_relu_no_thetadot = utils.setup_relu((2, 4, 3, 2),
                                                     params=None,
                                                     negative_slope=0.1,
                                                     bias=True,
                                                     dtype=self.dtype)
        dynamics_relu_no_thetadot[0].weight.data = torch.tensor(
            [[0.2, 0.5], [-1.3, 0.5], [-0.3, -0.2], [-0.4, -1.4]],
            dtype=self.dtype)
        dynamics_relu_no_thetadot[0].bias.data = torch.tensor(
            [0.4, -1.2, 0.1, 2.3], dtype=self.dtype)
        dynamics_relu_no_thetadot[2].weight.data = torch.tensor(
            [[0.4, 0.1, -1.4, 0.2], [0.1, -0.2, -0.5, -1.1],
             [0.3, 0.5, 1.1, -0.2]],
            dtype=self.dtype)
        dynamics_relu_no_thetadot[2].bias.data = torch.tensor(
            [0.2, 0.1, -0.3], dtype=self.dtype)
        dynamics_relu_no_thetadot[4].weight.data = torch.tensor(
            [[0.1, -0.3, 0.5], [0.3, -0.2, 2.1]], dtype=self.dtype)
        dynamics_relu_no_thetadot[4].bias.data = torch.tensor(
            [0.4, -1.2], dtype=self.dtype)
        self.dut_thetadot_not_input = unicycle.UnicycleReLUModel(
            self.dtype,
            x_lo=torch.tensor([-3, -3, -np.pi], dtype=self.dtype),
            x_up=torch.tensor([3, 3, np.pi], dtype=self.dtype),
            u_lo=torch.tensor([-2, -0.5], dtype=self.dtype),
            u_up=torch.tensor([5, 0.5], dtype=self.dtype),
            dynamics_relu=dynamics_relu_no_thetadot,
            dt=0.01,
            thetadot_as_input=False)
        dynamics_relu_thetadot = utils.setup_relu((3, 4, 3, 2),
                                                  params=None,
                                                  negative_slope=0.1,
                                                  bias=True,
                                                  dtype=self.dtype)
        dynamics_relu_thetadot[0].weight.data = torch.tensor(
            [[0.2, 0.5, 0.1], [-1.3, 0.5, -1.2], [-0.3, -0.2, 0.4],
             [-0.4, -1.4, 0.5]],
            dtype=self.dtype)
        # BUG FIX: this line originally re-assigned
        # dynamics_relu_no_thetadot[0].bias, leaving the thetadot network's
        # first-layer bias at its random initialization.
        dynamics_relu_thetadot[0].bias.data = torch.tensor(
            [0.4, -1.2, 0.1, 2.3], dtype=self.dtype)
        dynamics_relu_thetadot[2].weight.data = \
            dynamics_relu_no_thetadot[2].weight.data
        dynamics_relu_thetadot[2].bias.data = \
            dynamics_relu_no_thetadot[2].bias.data
        dynamics_relu_thetadot[4].weight.data = \
            dynamics_relu_no_thetadot[4].weight.data
        # BUG FIX: this was a no-op self-assignment
        # (dynamics_relu_thetadot[4].bias.data = itself); copy from the
        # no-thetadot network as the surrounding lines intend.
        dynamics_relu_thetadot[4].bias.data = \
            dynamics_relu_no_thetadot[4].bias.data
        self.dut_thetadot_input = unicycle.UnicycleReLUModel(
            self.dtype,
            x_lo=torch.tensor([-3, -3, -np.pi], dtype=self.dtype),
            x_up=torch.tensor([3, 3, np.pi], dtype=self.dtype),
            u_lo=torch.tensor([-2, -0.5], dtype=self.dtype),
            u_up=torch.tensor([5, 0.5], dtype=self.dtype),
            dynamics_relu=dynamics_relu_thetadot,
            dt=0.01,
            thetadot_as_input=True)

    def step_forward_tester(self, dut):
        """Check dut.step_forward against a hand-written evaluation of the
        discrete-time update, for a single sample and for a batch."""
        # First test a single x_start and u_start.
        x_start = torch.tensor([0.2, 0.5, -0.1], dtype=self.dtype)
        u_start = torch.tensor([2.1, 0.3], dtype=self.dtype)
        x_next = dut.step_forward(x_start, u_start)

        def eval_next_state(x_val, u_val):
            # The network input is (theta, vel) or (theta, vel, thetadot).
            # The position update is phi(input) - phi(0), so the position is
            # unchanged when the network input is zero.
            if dut.thetadot_as_input:
                network_input = torch.tensor([x_val[2], u_val[0], u_val[1]],
                                             dtype=self.dtype)
                network_input_zero = torch.zeros((3,), dtype=self.dtype)
            else:
                network_input = torch.tensor([x_val[2], u_val[0]],
                                             dtype=self.dtype)
                network_input_zero = torch.zeros((2,), dtype=self.dtype)
            position_next = x_val[:2] + \
                dut.dynamics_relu(network_input) - dut.dynamics_relu(
                    network_input_zero)
            # theta integrates thetadot with explicit Euler.
            theta_next = x_val[2] + u_val[1] * dut.dt
            return np.array([
                position_next[0].item(), position_next[1].item(),
                theta_next.item()
            ])

        np.testing.assert_allclose(x_next.detach().numpy(),
                                   eval_next_state(x_start, u_start))
        # Now test a batch of x_start and u_start.
        x_start = torch.tensor([[0.2, 0.5, -0.1], [0.4, 0.3, 0.5]],
                               dtype=self.dtype)
        u_start = torch.tensor([[2.1, 0.3], [-0.3, 0.4]], dtype=self.dtype)
        x_next = dut.step_forward(x_start, u_start)
        self.assertEqual(x_next.shape, (2, 3))
        for i in range(x_start.shape[0]):
            np.testing.assert_allclose(
                x_next[i].detach().numpy(),
                eval_next_state(x_start[i], u_start[i]))

    def test_step_forward_thetadot_not_input(self):
        self.step_forward_tester(self.dut_thetadot_not_input)

    def test_step_forward_thetadot_as_input(self):
        self.step_forward_tester(self.dut_thetadot_input)

    def add_dynamics_constraint_tester(self, dut):
        """Check that the MIP constraints added by add_dynamics_constraint
        reproduce step_forward when x and u are fixed."""

        def tester(x_val, u_val):
            # Setup an MILP with fixed x_var and u_var, check if x_next_var
            # is solved to the right value.
            mip = gurobi_torch_mip.GurobiTorchMILP(self.dtype)
            x_var = mip.addVars(3, lb=-gurobipy.GRB.INFINITY)
            u_var = mip.addVars(2, lb=-gurobipy.GRB.INFINITY)
            x_next_var = mip.addVars(3, lb=-gurobipy.GRB.INFINITY)
            dut.add_dynamics_constraint(mip, x_var, x_next_var, u_var,
                                        "slack", "binary")
            # Fix x_var to x_val, u_var to u_val with equality constraints.
            mip.addMConstrs([torch.eye(3, dtype=self.dtype)], [x_var],
                            sense=gurobipy.GRB.EQUAL,
                            b=x_val)
            mip.addMConstrs([torch.eye(2, dtype=self.dtype)], [u_var],
                            sense=gurobipy.GRB.EQUAL,
                            b=u_val)
            mip.gurobi_model.setParam(gurobipy.GRB.Param.OutputFlag, False)
            mip.gurobi_model.optimize()
            self.assertEqual(mip.gurobi_model.status,
                             gurobipy.GRB.Status.OPTIMAL)
            x_next_val = np.array([var.xn for var in x_next_var])
            x_next_val_expected = dut.step_forward(x_val, u_val)
            np.testing.assert_allclose(x_next_val,
                                       x_next_val_expected.detach().numpy(),
                                       atol=1e-8)

        tester(torch.tensor([0., 0., 0.], dtype=self.dtype),
               torch.tensor([0., 0.], dtype=self.dtype))
        tester(torch.tensor([0.5, -0.3, 0.4], dtype=self.dtype),
               torch.tensor([0., 0.], dtype=self.dtype))
        tester(torch.tensor([0.6, -1.3, 0.4], dtype=self.dtype),
               torch.tensor([4., 0.3], dtype=self.dtype))
        tester(torch.tensor([0.6, -1.3, 0.4], dtype=self.dtype),
               torch.tensor([-2., 0.3], dtype=self.dtype))

    def test_add_dynamics_constraint_thetadot_not_input(self):
        self.add_dynamics_constraint_tester(self.dut_thetadot_not_input)

    def test_add_dynamics_constraint_thetadot_as_input(self):
        self.add_dynamics_constraint_tester(self.dut_thetadot_input)
class TestUnicycleReLUZeroVelModel(unittest.TestCase):
    """Tests for the zero-velocity-anchored ReLU unicycle model.

    This variant anchors the position update at zero *velocity* (rather than
    an all-zero network input), so the pose is unchanged whenever vel = 0.
    """

    def setUp(self):
        self.dtype = torch.float64
        # Arbitrarily initialize the relu network. All the tests should pass
        # even if the network doesn't approximate the unicycle dynamics.
        dynamics_relu_no_thetadot = utils.setup_relu((2, 4, 3, 2),
                                                     params=None,
                                                     negative_slope=0.1,
                                                     bias=True,
                                                     dtype=self.dtype)
        dynamics_relu_no_thetadot[0].weight.data = torch.tensor(
            [[0.2, 0.5], [-1.3, 0.5], [-0.3, -0.2], [-0.4, -1.4]],
            dtype=self.dtype)
        dynamics_relu_no_thetadot[0].bias.data = torch.tensor(
            [0.4, -1.2, 0.1, 2.3], dtype=self.dtype)
        dynamics_relu_no_thetadot[2].weight.data = torch.tensor(
            [[0.4, 0.1, -1.4, 0.2], [0.1, -0.2, -0.5, -1.1],
             [0.3, 0.5, 1.1, -0.2]],
            dtype=self.dtype)
        dynamics_relu_no_thetadot[2].bias.data = torch.tensor(
            [0.2, 0.1, -0.3], dtype=self.dtype)
        dynamics_relu_no_thetadot[4].weight.data = torch.tensor(
            [[0.1, -0.3, 0.5], [0.3, -0.2, 2.1]], dtype=self.dtype)
        dynamics_relu_no_thetadot[4].bias.data = torch.tensor(
            [0.4, -1.2], dtype=self.dtype)
        self.dut_thetadot_not_input = unicycle.UnicycleReLUZeroVelModel(
            self.dtype,
            x_lo=torch.tensor([-3, -3, -np.pi], dtype=self.dtype),
            x_up=torch.tensor([3, 3, np.pi], dtype=self.dtype),
            u_lo=torch.tensor([-2, -0.5], dtype=self.dtype),
            u_up=torch.tensor([5, 0.5], dtype=self.dtype),
            dynamics_relu=dynamics_relu_no_thetadot,
            dt=0.01,
            thetadot_as_input=False)
        dynamics_relu_thetadot = utils.setup_relu((3, 4, 3, 2),
                                                  params=None,
                                                  negative_slope=0.1,
                                                  bias=True,
                                                  dtype=self.dtype)
        dynamics_relu_thetadot[0].weight.data = torch.tensor(
            [[0.2, 0.5, 0.1], [-1.3, 0.5, -1.2], [-0.3, -0.2, 0.4],
             [-0.4, -1.4, 0.5]],
            dtype=self.dtype)
        # BUG FIX: this line originally re-assigned
        # dynamics_relu_no_thetadot[0].bias, leaving the thetadot network's
        # first-layer bias at its random initialization.
        dynamics_relu_thetadot[0].bias.data = torch.tensor(
            [0.4, -1.2, 0.1, 2.3], dtype=self.dtype)
        dynamics_relu_thetadot[2].weight.data = \
            dynamics_relu_no_thetadot[2].weight.data
        dynamics_relu_thetadot[2].bias.data = \
            dynamics_relu_no_thetadot[2].bias.data
        dynamics_relu_thetadot[4].weight.data = \
            dynamics_relu_no_thetadot[4].weight.data
        # BUG FIX: this was a no-op self-assignment
        # (dynamics_relu_thetadot[4].bias.data = itself); copy from the
        # no-thetadot network as the surrounding lines intend.
        dynamics_relu_thetadot[4].bias.data = \
            dynamics_relu_no_thetadot[4].bias.data
        self.dut_thetadot_input = unicycle.UnicycleReLUZeroVelModel(
            self.dtype,
            x_lo=torch.tensor([-3, -3, -np.pi], dtype=self.dtype),
            x_up=torch.tensor([3, 3, np.pi], dtype=self.dtype),
            u_lo=torch.tensor([-2, -0.5], dtype=self.dtype),
            u_up=torch.tensor([5, 0.5], dtype=self.dtype),
            dynamics_relu=dynamics_relu_thetadot,
            dt=0.01,
            thetadot_as_input=True)

    def step_forward_tester(self, dut):
        """Check dut.step_forward against a hand-written evaluation of the
        discrete-time update, for a single sample and for a batch."""
        # First make sure that if vel = 0, then pos[n+1] = pos[n].
        x_start = torch.tensor([0.5, 0.3, -1.2], dtype=self.dtype)
        u_start = torch.tensor([0, 0.5], dtype=self.dtype)
        np.testing.assert_allclose(
            dut.step_forward(x_start, u_start)[:2].detach().numpy(),
            x_start[:2].detach().numpy())
        # First test a single x_start and u_start.
        x_start = torch.tensor([0.2, 0.5, -0.1], dtype=self.dtype)
        u_start = torch.tensor([2.1, 0.3], dtype=self.dtype)
        x_next = dut.step_forward(x_start, u_start)

        def eval_next_state(x_val, u_val):
            # The position update is phi(input) - phi(input with vel = 0),
            # which guarantees zero position change at zero velocity.
            if dut.thetadot_as_input:
                network_input = torch.tensor([x_val[2], u_val[0], u_val[1]],
                                             dtype=self.dtype)
                network_input_zero_vel = torch.tensor(
                    [x_val[2], 0, u_val[1]], dtype=self.dtype)
            else:
                network_input = torch.tensor([x_val[2], u_val[0]],
                                             dtype=self.dtype)
                network_input_zero_vel = torch.tensor([x_val[2], 0],
                                                      dtype=self.dtype)
            position_next = x_val[:2] + \
                dut.dynamics_relu(network_input) - dut.dynamics_relu(
                    network_input_zero_vel)
            # theta integrates thetadot with explicit Euler.
            theta_next = x_val[2] + u_val[1] * dut.dt
            return np.array([
                position_next[0].item(), position_next[1].item(),
                theta_next.item()
            ])

        np.testing.assert_allclose(x_next.detach().numpy(),
                                   eval_next_state(x_start, u_start))
        # Now test a batch of x_start and u_start.
        x_start = torch.tensor([[0.2, 0.5, -0.1], [0.4, 0.3, 0.5]],
                               dtype=self.dtype)
        u_start = torch.tensor([[2.1, 0.3], [-0.3, 0.4]], dtype=self.dtype)
        x_next = dut.step_forward(x_start, u_start)
        self.assertEqual(x_next.shape, (2, 3))
        for i in range(x_start.shape[0]):
            np.testing.assert_allclose(
                x_next[i].detach().numpy(),
                eval_next_state(x_start[i], u_start[i]))

    def test_step_forward_thetadot_not_input(self):
        self.step_forward_tester(self.dut_thetadot_not_input)

    def test_step_forward_thetadot_as_input(self):
        self.step_forward_tester(self.dut_thetadot_input)

    def add_dynamics_constraint_tester(self, dut):
        """Check that the MIP constraints added by add_dynamics_constraint
        reproduce step_forward when x and u are fixed."""

        def tester(x_val, u_val):
            # Setup an MILP with fixed x_var and u_var, check if x_next_var
            # is solved to the right value.
            mip = gurobi_torch_mip.GurobiTorchMILP(self.dtype)
            x_var = mip.addVars(3, lb=-gurobipy.GRB.INFINITY)
            u_var = mip.addVars(2, lb=-gurobipy.GRB.INFINITY)
            x_next_var = mip.addVars(3, lb=-gurobipy.GRB.INFINITY)
            dut.add_dynamics_constraint(mip, x_var, x_next_var, u_var,
                                        "slack", "binary")
            # Fix x_var to x_val, u_var to u_val with equality constraints.
            mip.addMConstrs([torch.eye(3, dtype=self.dtype)], [x_var],
                            sense=gurobipy.GRB.EQUAL,
                            b=x_val)
            mip.addMConstrs([torch.eye(2, dtype=self.dtype)], [u_var],
                            sense=gurobipy.GRB.EQUAL,
                            b=u_val)
            mip.gurobi_model.setParam(gurobipy.GRB.Param.OutputFlag, False)
            mip.gurobi_model.optimize()
            self.assertEqual(mip.gurobi_model.status,
                             gurobipy.GRB.Status.OPTIMAL)
            x_next_val = np.array([var.xn for var in x_next_var])
            x_next_val_expected = dut.step_forward(x_val, u_val)
            np.testing.assert_allclose(x_next_val,
                                       x_next_val_expected.detach().numpy(),
                                       atol=1e-8)

        tester(torch.tensor([0., 0., 0.], dtype=self.dtype),
               torch.tensor([0., 0.], dtype=self.dtype))
        tester(torch.tensor([0.5, -0.3, 0.4], dtype=self.dtype),
               torch.tensor([0., 0.], dtype=self.dtype))
        tester(torch.tensor([0.6, -1.3, 0.4], dtype=self.dtype),
               torch.tensor([4., 0.3], dtype=self.dtype))
        tester(torch.tensor([0.6, -1.3, 0.4], dtype=self.dtype),
               torch.tensor([-2., 0.3], dtype=self.dtype))

    def test_add_dynamics_constraint_thetadot_not_input(self):
        self.add_dynamics_constraint_tester(self.dut_thetadot_not_input)

    def test_add_dynamics_constraint_thetadot_as_input(self):
        self.add_dynamics_constraint_tester(self.dut_thetadot_input)
# Run all unit tests in this file when executed as a script.
if __name__ == "__main__":
    unittest.main()
| 49.165803
| 79
| 0.536832
| 2,541
| 18,978
| 3.795356
| 0.070051
| 0.076524
| 0.107424
| 0.054749
| 0.881688
| 0.865823
| 0.841456
| 0.827769
| 0.816777
| 0.816777
| 0
| 0.044866
| 0.332912
| 18,978
| 385
| 80
| 49.293506
| 0.716904
| 0.039783
| 0
| 0.796923
| 0
| 0
| 0.001654
| 0
| 0
| 0
| 0
| 0
| 0.055385
| 1
| 0.067692
| false
| 0
| 0.027692
| 0
| 0.110769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2b5f15fc1346fc0b8a38460066380ca206ff4637
| 2,850
|
py
|
Python
|
opp_function.py
|
LukeKort/OCM
|
674f73dcfeb86aa58d67fbe0abf8a997dff439a7
|
[
"MIT"
] | null | null | null |
opp_function.py
|
LukeKort/OCM
|
674f73dcfeb86aa58d67fbe0abf8a997dff439a7
|
[
"MIT"
] | null | null | null |
opp_function.py
|
LukeKort/OCM
|
674f73dcfeb86aa58d67fbe0abf8a997dff439a7
|
[
"MIT"
] | null | null | null |
# Objectives and constraints functions (Aug. 04, 2021)
import math
import numpy as np
def objective(var_o, gamma_=None, theta=None, mttf=None):
    """Weibull maintenance-cost objective function.

    Args:
        var_o: maintenance interval (hours); floored to an integer.  Must
            support ``.copy()`` (e.g. a numpy scalar/array element).
        gamma_: Weibull shape parameter (was a blank template slot in the
            original file, which made it a syntax error — now a required
            keyword argument).
        theta: Weibull scale parameter (required keyword argument).
        mttf: mean time to failure (required keyword argument).

    Returns:
        Total expected cost over the service horizon.

    Raises:
        ValueError: if any of the Weibull parameters is not supplied.
    """
    if gamma_ is None or theta is None or mttf is None:
        raise ValueError(
            "gamma_, theta and mttf must be provided (the original template "
            "left these assignments blank)")
    # Reliability at the (integer) maintenance interval t.
    t = int(math.floor(var_o.copy()))
    r_t = math.exp(-((t / theta) ** (gamma_)))
    # Cost terms.
    c_m = 1000     # preventive maintenance cost
    c_r = 2500     # corrective repair cost
    c_inc = 10000  # incident cost
    t_ser = 87600  # service horizon (hours)
    # Expected cost: maintenance while reliable plus repair+incident
    # weighted by the failure probability.
    c_t = (t_ser / t) * c_m * r_t + (t_ser / mttf) * (c_r + c_inc) * (1 - r_t)
    return c_t
def constraints(var_c, gamma_=None, theta=None, lim=None, t_m=None, t_r=None):
    """Weibull reliability constraint function.

    Args:
        var_c: maintenance interval (hours); floored to an integer.  Must
            support ``.copy()`` (e.g. a numpy scalar/array element).
        gamma_: Weibull shape parameter (required keyword argument; the
            original file left this assignment blank — a syntax error).
        theta: Weibull scale parameter (required keyword argument).
        lim: reliability lower limit (required keyword argument).
        t_m: repair time (required keyword argument).
        t_r: maintenance time (required keyword argument).

    Returns:
        True when the reliability r_t meets the limit, else False.

    Raises:
        ValueError: if any template parameter is not supplied.
    """
    if (gamma_ is None or theta is None or lim is None or t_m is None
            or t_r is None):
        raise ValueError(
            "gamma_, theta, lim, t_m and t_r must be provided (the original "
            "template left these assignments blank)")
    # Reliability at the (integer) maintenance interval t.
    t = int(math.floor(var_c.copy()))
    r_t = math.exp(-((t / theta) ** (gamma_)))
    # Availability (kept from the template; swap a_t for r_t in the return
    # below to use availability as the constraint instead of reliability).
    a_t = t / (t + r_t * t_m + (1 - r_t) * t_r)
    # Constraint test: True means all conditions have been met.
    return r_t >= lim
def objective(var_o):
    """Lognormal maintenance-cost objective function.

    Approximates the lognormal reliability at interval ``var_o`` using a
    closed-form rational approximation of the standard-normal tail, then
    combines maintenance, repair and incident costs over the service
    horizon.  ``var_o`` must support ``.copy()`` (e.g. a numpy scalar).
    """
    # Lognormal parameters.
    mu = 5.9093828021596
    sigma = 0.486238331177103
    t = int(math.floor(var_o.copy()))
    # Standardized deviate of log(var_o).
    z = (mu - math.log(var_o)) / sigma
    # Rational approximation of the standard-normal tail probability
    # (expressions kept bit-identical to the original implementation).
    numer = (4 - math.pi) * abs(abs(z)) + math.sqrt(2 * math.pi) * (math.pi - 2)
    denom = (((4 - math.pi) * math.sqrt(2 * math.pi) * abs(z) ** 2)
             + (2 * math.pi * abs(z))
             + (2 * math.sqrt(2 * math.pi) * (math.pi - 2)))
    gauss_tail = math.exp(-(abs(z) ** 2) / 2)
    tail_prob = (numer / denom) * gauss_tail
    # Reliability from the tail probability, branch on the sign of z.
    r_t = (1 - tail_prob) if z < 0 else (1 - (1 - tail_prob))
    # Cost terms.
    c_m = 1000     # preventive maintenance cost
    c_r = 2500     # corrective repair cost
    c_inc = 10000  # incident cost
    t_ser = 87600  # service horizon (hours)
    mttf = 413     # mean time to failure
    # Expected cost over the service horizon.
    return (t_ser / t) * c_m * r_t + (t_ser / mttf) * (c_r + c_inc) * (1 - r_t)
def constraints(var_c):
    """Availability constraint for the lognormal model.

    Computes the lognormal reliability at interval ``var_c`` with the same
    normal-tail approximation as the objective, derives availability from
    repair/maintenance downtime, and returns True when availability is at
    least 0.99.  ``var_c`` must support ``.copy()`` (e.g. a numpy scalar).
    """
    # Lognormal parameters.
    mu = 5.9093828021596
    sigma = 0.486238331177103
    t = int(math.floor(var_c.copy()))
    # Standardized deviate of log(var_c).
    z = (mu - math.log(var_c)) / sigma
    # Rational approximation of the standard-normal tail probability
    # (expressions kept bit-identical to the original implementation).
    numer = (4 - math.pi) * abs(abs(z)) + math.sqrt(2 * math.pi) * (math.pi - 2)
    denom = (((4 - math.pi) * math.sqrt(2 * math.pi) * abs(z) ** 2)
             + (2 * math.pi * abs(z))
             + (2 * math.sqrt(2 * math.pi) * (math.pi - 2)))
    gauss_tail = math.exp(-(abs(z) ** 2) / 2)
    tail_prob = (numer / denom) * gauss_tail
    r_t = (1 - tail_prob) if z < 0 else (1 - (1 - tail_prob))
    # Availability: uptime over uptime plus expected downtime.
    t_m = 3  # repair time
    t_r = 5  # maintenance time
    a_t = t / (t + r_t * t_m + (1 - r_t) * t_r)
    # (Swap r_t for a_t here to use reliability as the constraint instead.)
    # True means all constraint conditions have been met.
    return a_t >= 0.99
| 21.755725
| 116
| 0.586316
| 489
| 2,850
| 3.241309
| 0.188139
| 0.021451
| 0.035331
| 0.049211
| 0.888328
| 0.869401
| 0.811356
| 0.783596
| 0.753312
| 0.753312
| 0
| 0.080114
| 0.264211
| 2,850
| 131
| 117
| 21.755725
| 0.675727
| 0.25193
| 0
| 0.80597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.029851
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2b6b5c1478793d7903c7916f3693d050a02befc1
| 3,293
|
py
|
Python
|
选拔赛/code/折线图2.py
|
775269512/WHUT_CUMCM20
|
a966c09e46c1789a86d4532f46d503a2226e0a47
|
[
"MIT"
] | 73
|
2020-09-20T15:39:26.000Z
|
2022-03-10T23:37:17.000Z
|
选拔赛/code/折线图2.py
|
HHHTTY/WHUT_CUMCM20
|
a966c09e46c1789a86d4532f46d503a2226e0a47
|
[
"MIT"
] | 3
|
2021-09-18T04:43:08.000Z
|
2021-12-02T08:10:53.000Z
|
选拔赛/code/折线图2.py
|
HHHTTY/WHUT_CUMCM20
|
a966c09e46c1789a86d4532f46d503a2226e0a47
|
[
"MIT"
] | 27
|
2020-09-20T15:39:29.000Z
|
2022-02-28T12:15:06.000Z
|
from pylab import *

mpl.rcParams['font.sans-serif'] = ['SimHei']  # render CJK axis labels
# import matplotlib.pyplot as plt
import numpy
import matplotlib.colors as colors
import matplotlib.cm as cmx

# Node id (as string) -> (x, y) coordinate in km.  Node "0" is the depot.
dicts = {"1": [3, 2], "2": [1, 5], "3": [5, 4], "4": [4, 7], "5": [0, 8],
         "6": [3, 11], "7": [7, 9], "8": [9, 6], "9": [10, 2], "10": [14, 0],
         "11": [2, 16], "12": [6, 18], "13": [11, 17], "14": [15, 12],
         "15": [19, 9], "16": [22, 5], "17": [21, 0], "18": [27, 9],
         "19": [15, 19], "0": [10, 10]}


def _split_routes(cars):
    """Split the flat visit sequence into per-vehicle routes.

    Each route starts at the depot (node 0); a 0 inside ``cars`` closes the
    current route (the closing 0 is kept as its last element).
    """
    routes = []
    route = [0]
    for stop in cars:
        route.append(stop)
        if stop == 0:
            routes.append(route)
            route = [0]
    return routes


def _plot_routes(cars, style, draw_arrows):
    """Plot every route in ``cars`` as a Manhattan-style polyline.

    Between consecutive nodes an elbow point (next x, current y) is
    inserted, so each leg is drawn as a horizontal then vertical segment —
    the same path the original copy-pasted sections produced.  When
    ``draw_arrows`` is True each segment also gets a direction arrow.
    """
    routes = _split_routes(cars)
    print(routes)
    for route in routes:
        node_x = [dicts[str(node)][0] for node in route]
        node_y = [dicts[str(node)][1] for node in route]
        # Build the polyline with one elbow per leg.
        path_x = []
        path_y = []
        for i in range(len(route) - 1):
            path_x.append(node_x[i])
            path_y.append(node_y[i])
            path_x.append(node_x[i + 1])  # elbow: next x ...
            path_y.append(node_y[i])      # ... at current y
        path_x.append(node_x[-1])
        path_y.append(node_y[-1])
        plt.plot(path_x, path_y, style, alpha=0.8)
        if draw_arrows:
            for i in range(0, len(path_x) - 1):
                plt.arrow(path_x[i], path_y[i],
                          path_x[i + 1] - path_x[i],
                          path_y[i + 1] - path_y[i],
                          length_includes_head=True, head_width=0.3, lw=2,)
        # Label each visited node with its coordinates.
        for x, y in zip(node_x, node_y):
            plt.text(x, y + 0.3, '({},{})'.format(x, y),)
    plt.xlabel('X轴/km')
    plt.ylabel('Y轴/km')


# Main fleet routes, drawn in red with direction arrows.
_plot_routes(
    [12, 11, 0, 15, 16, 0, 5, 2, 0, 14, 19, 13, 0, 4, 6, 0, 18, 0, 17, 10,
     0, 7, 0, 9, 1, 0],
    'ro-', draw_arrows=True)
# Remaining short route (nodes 3 and 8), drawn without arrows.
_plot_routes([3, 8, 0], 'o-', draw_arrows=False)
plt.show()
# plt.savefig('demo.jpg')  # save the figure instead of showing it
| 25.330769
| 93
| 0.51898
| 589
| 3,293
| 2.711375
| 0.168081
| 0.110207
| 0.061991
| 0.041327
| 0.809017
| 0.809017
| 0.797746
| 0.797746
| 0.797746
| 0.797746
| 0
| 0.071778
| 0.25539
| 3,293
| 130
| 94
| 25.330769
| 0.579527
| 0.07106
| 0
| 0.836735
| 0
| 0
| 0.029713
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.020408
| 0.040816
| 0
| 0.040816
| 0.020408
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
992796fdb5509a43c66de22e35452020e2354baf
| 930
|
py
|
Python
|
LeetCode/easy - Hash Table/771. Jewels and Stones/solution.py
|
vincent507cpu/Comprehensive-Algorithm-Solution
|
04e01e49622457f09af2e1133954f043c0c92cb9
|
[
"MIT"
] | 4
|
2020-06-26T00:45:53.000Z
|
2021-04-19T12:23:32.000Z
|
LeetCode/easy - Hash Table/771. Jewels and Stones/solution.py
|
vincent507cpu/LeetCode-Comprehensive-Solution
|
04e01e49622457f09af2e1133954f043c0c92cb9
|
[
"MIT"
] | null | null | null |
LeetCode/easy - Hash Table/771. Jewels and Stones/solution.py
|
vincent507cpu/LeetCode-Comprehensive-Solution
|
04e01e49622457f09af2e1133954f043c0c92cb9
|
[
"MIT"
] | null | null | null |
# comprehensive solution
class Solution:
    """LeetCode 771 "Jewels and Stones".

    Count how many characters of ``S`` (stones) also occur in ``J``
    (jewels); comparisons are case-sensitive.  The original file defined
    ``numJewelsInStones`` four times, so the first three definitions were
    silently shadowed.  The public name is kept with the O(len(J)+len(S))
    set-based version, and the alternatives get distinct names.
    """

    def numJewelsInStones(self, J: str, S: str) -> int:
        """Return the number of stones that are jewels (set lookup)."""
        jewels = set(J)  # O(1) membership per stone
        return sum(1 for stone in S if stone in jewels)

    # --- Equivalent alternative one-liners ------------------------------
    def numJewelsInStones_count(self, J: str, S: str) -> int:
        """Sum of S.count(jewel) over jewels; O(len(J) * len(S))."""
        return sum(S.count(jewel) for jewel in J)

    def numJewelsInStones_membership(self, J: str, S: str) -> int:
        """Sum of per-stone membership booleans; O(len(J) * len(S))."""
        return sum(s in J for s in S)

    def numJewelsInStones_map(self, J: str, S: str) -> int:
        """map/str.count variant (the definition the original class kept)."""
        return sum(map(J.count, S))
| 48.947368
| 111
| 0.662366
| 130
| 930
| 4.723077
| 0.3
| 0.130293
| 0.156352
| 0.162866
| 0.791531
| 0.791531
| 0.791531
| 0.791531
| 0.791531
| 0.791531
| 0
| 0.02027
| 0.204301
| 930
| 19
| 112
| 48.947368
| 0.809459
| 0.451613
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.3
| 0.9
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
996523a41a0eb3794adde74163976cf3f1b14f36
| 7,502
|
py
|
Python
|
penduduk/kabupaten/hitunggolongandarah.py
|
bowo-anakdesa/count-sidekem
|
8737d6cc788ba51ec6f919dbc2cedcced76ea139
|
[
"MIT"
] | 1
|
2019-06-28T02:02:51.000Z
|
2019-06-28T02:02:51.000Z
|
penduduk/kabupaten/hitunggolongandarah.py
|
bowo-anakdesa/count-sidekem
|
8737d6cc788ba51ec6f919dbc2cedcced76ea139
|
[
"MIT"
] | 1
|
2019-08-03T18:39:33.000Z
|
2019-08-03T18:39:33.000Z
|
penduduk/kabupaten/hitunggolongandarah.py
|
bowo-anakdesa/count-sidekem
|
8737d6cc788ba51ec6f919dbc2cedcced76ea139
|
[
"MIT"
] | null | null | null |
import pymysql

# Blood-group column suffixes in `statistik_goldarah_kab`; the matching
# `goldarah` value in the residents table is the upper-cased suffix.
BLOOD_GROUPS = ['a', 'b', 'ab', 'o', 'a+', 'a-', 'b+', 'b-',
                'ab+', 'ab-', 'o+', 'o-']
# Spellings that mean "blood group unknown" in the source data.
UNKNOWN_VALUES = ('-', '', 'Tidak Tahu', 'Tdk Tahu')
# Column prefix -> gender (jk) LIKE pattern; None counts both genders.
GENDER_FILTERS = [('total', None), ('lk', 'L%'), ('pr', 'P%')]

db = pymysql.connect(host="localhost", user="root", passwd="12345678",
                     db="sidekem")
cur = db.cursor()


def _count(kab_id, jk_pattern, goldarah):
    """Count residents of district kab_id with the given blood group.

    goldarah is the blood-group value ('A', 'B+', ...) or None to count the
    "unknown" spellings.  jk_pattern optionally restricts to one gender.
    Values are passed as query parameters (the original built every query
    by string concatenation).
    """
    query = "SELECT COUNT(*) FROM `datapenduduk-33` WHERE kab_id=%s"
    params = [kab_id]
    if jk_pattern is not None:
        query += " AND jk LIKE %s"
        params.append(jk_pattern)
    if goldarah is None:
        placeholders = ", ".join(["%s"] * len(UNKNOWN_VALUES))
        query += " AND goldarah IN (" + placeholders + ")"
        params.extend(UNKNOWN_VALUES)
    else:
        # LIKE with no wildcard behaves as the original equality-style match.
        query += " AND goldarah LIKE %s"
        params.append(goldarah)
    cur.execute(query, params)
    return cur.fetchone()[0]


cur.execute("SELECT id FROM `statistik_goldarah_kab` WHERE id LIKE '%3327%' ")
kabupaten = cur.fetchall()
for row in kabupaten:
    kab_id = row[0]
    columns = []
    values = []
    # Column order matches the original hand-written UPDATE: all total_*
    # columns, then lk_* (male), then pr_* (female), each ending with the
    # *_tdk_tahu (unknown) bucket.
    for prefix, jk_pattern in GENDER_FILTERS:
        for group in BLOOD_GROUPS:
            columns.append('%s_%s' % (prefix, group))
            values.append(_count(kab_id, jk_pattern, group.upper()))
        columns.append('%s_tdk_tahu' % prefix)
        values.append(_count(kab_id, jk_pattern, None))
    # Column names are trusted constants; backticks are required because
    # some contain '+'/'-'.  Counts and the id go in as parameters.
    set_clause = ", ".join("`%s`=%%s" % col for col in columns)
    cur.execute(
        "UPDATE `statistik_goldarah_kab` SET %s WHERE id=%%s" % set_clause,
        values + [kab_id])
    # Commit per district, as the original script did.
    db.commit()
| 83.355556
| 1,055
| 0.620501
| 1,107
| 7,502
| 4.128275
| 0.084914
| 0.089716
| 0.140044
| 0.179212
| 0.75733
| 0.75733
| 0.75733
| 0.745514
| 0.745514
| 0.745514
| 0
| 0.047678
| 0.141696
| 7,502
| 90
| 1,056
| 83.355556
| 0.662059
| 0.006132
| 0
| 0
| 0
| 0
| 0.565678
| 0.00644
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.011628
| 0.011628
| 0
| 0.011628
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9984290ad62ac9fca12bf6d7cea4d184b367d13a
| 13,405
|
py
|
Python
|
essentials_kit_management/tests/interactors/test_get_form_interactor.py
|
RajeshKumar1490/iB_hubs_mini_project
|
f7126092400fb9a62fb4bff643dae7cda3a8d9d2
|
[
"MIT"
] | null | null | null |
essentials_kit_management/tests/interactors/test_get_form_interactor.py
|
RajeshKumar1490/iB_hubs_mini_project
|
f7126092400fb9a62fb4bff643dae7cda3a8d9d2
|
[
"MIT"
] | 2
|
2021-09-07T07:06:00.000Z
|
2021-09-07T07:24:26.000Z
|
essentials_kit_management/tests/interactors/test_get_form_interactor.py
|
RajeshKumar1490/iB_hubs_mini_project
|
f7126092400fb9a62fb4bff643dae7cda3a8d9d2
|
[
"MIT"
] | null | null | null |
import pytest
from mock import create_autospec
from django_swagger_utils.drf_server.exceptions import NotFound
from essentials_kit_management.interactors.get_form_interactor \
import GetFormInteractor
from essentials_kit_management.interactors.storages.dtos \
import FormMetricsDto, FormDetailsDto, CompleteFormDetailsDto
from essentials_kit_management.interactors.storages.form_storage_interface \
import FormStorageInterface
from essentials_kit_management.interactors.storages.\
order_item_storage_interface import OrderItemStorageInterface
from essentials_kit_management.interactors.presenters.presenter_interface \
import PresenterInterface
def test_get_form_interactor_with_valid_form_id_returns_forms_details(
        section_dtos, item_dtos, brand_dtos,
        ordered_item_dtos, form_mock_presenter_response):
    # Happy-path test: with a valid form id the interactor fetches the form
    # details and the user's ordered items, and hands both (plus computed
    # form metrics) to the presenter.  The fixture arguments are presumably
    # pytest fixtures defined in a conftest — TODO confirm.
    # Arrange: autospec'd storage/presenter doubles so only the interactor's
    # orchestration is under test.
    form_id = 1
    user_id = 1
    form_storage = create_autospec(FormStorageInterface)
    order_item_storage = create_autospec(OrderItemStorageInterface)
    presenter = create_autospec(PresenterInterface)
    interactor = GetFormInteractor(
        form_storage=form_storage, order_item_storage=order_item_storage,
        presenter=presenter
    )
    # Canned form details returned by the storage mock.
    mock_form_details = FormDetailsDto(
        form_id=1,
        form_name='SnacksForm',
        form_description='This is form',
        close_date='2020-05-17 20:22:46',
        section_dtos=section_dtos,
        item_dtos=item_dtos,
        brand_dtos=brand_dtos
    )
    mock_ordered_items = ordered_item_dtos
    mock_presenter_response = form_mock_presenter_response
    # The interactor is expected to bundle form details with ordered items.
    complete_form_details_dto = CompleteFormDetailsDto(
        form_details_dto=mock_form_details,
        ordered_item_dtos=mock_ordered_items
    )
    # Metrics the interactor should compute from the ordered items
    # (values match the ordered_item_dtos fixture).
    expected_form_metrics_dto = \
        FormMetricsDto(total_cost=4500.0, total_items=15)
    form_storage.validate_form_id.return_value = True
    form_storage.get_form_details_dto.return_value = mock_form_details
    order_item_storage.get_user_ordered_item_dtos_of_form.return_value = \
        mock_ordered_items
    presenter.get_form_details_response.return_value = mock_presenter_response
    # Act
    form_details = interactor.get_form_details(
        form_id=form_id, user_id=user_id
    )
    # Assert: the presenter's response is returned unchanged, and each
    # collaborator is called exactly once with the expected arguments.
    assert form_details == mock_presenter_response
    form_storage.validate_form_id.assert_called_once_with(form_id=form_id)
    form_storage.get_form_details_dto.assert_called_once_with(
        form_id=form_id, user_id=user_id
    )
    order_item_storage.get_user_ordered_item_dtos_of_form.\
        assert_called_once_with(item_dtos=item_dtos, user_id=user_id)
    presenter.get_form_details_response.assert_called_once_with(
        complete_form_details_dto=complete_form_details_dto,
        form_metrics_dto=expected_form_metrics_dto
    )
def test_get_form_interactor_when_no_sections_returns_empty_section_list(
        item_dtos, brand_dtos,
        ordered_item_dtos, form_mock_presenter_response):
    """A form with no sections still resolves; section list stays empty."""
    # Arrange
    form_id, user_id = 1, 1
    form_storage = create_autospec(FormStorageInterface)
    order_item_storage = create_autospec(OrderItemStorageInterface)
    presenter = create_autospec(PresenterInterface)
    interactor = GetFormInteractor(
        form_storage=form_storage,
        order_item_storage=order_item_storage,
        presenter=presenter)
    form_details_dto = FormDetailsDto(
        form_id=1,
        form_name='SnacksForm',
        form_description='This is form',
        close_date='2020-05-17 20:22:46',
        section_dtos=[],
        item_dtos=item_dtos,
        brand_dtos=brand_dtos)
    expected_complete_dto = CompleteFormDetailsDto(
        form_details_dto=form_details_dto,
        ordered_item_dtos=ordered_item_dtos)
    expected_metrics_dto = FormMetricsDto(total_cost=4500.0, total_items=15)
    form_storage.validate_form_id.return_value = True
    form_storage.get_form_details_dto.return_value = form_details_dto
    order_item_storage.get_user_ordered_item_dtos_of_form.return_value = \
        ordered_item_dtos
    presenter.get_form_details_response.return_value = \
        form_mock_presenter_response

    # Act
    actual_response = interactor.get_form_details(
        form_id=form_id, user_id=user_id)

    # Assert
    assert actual_response == form_mock_presenter_response
    form_storage.validate_form_id.assert_called_once_with(form_id=form_id)
    form_storage.get_form_details_dto.assert_called_once_with(
        form_id=form_id, user_id=user_id)
    order_item_storage.get_user_ordered_item_dtos_of_form \
        .assert_called_once_with(item_dtos=item_dtos, user_id=user_id)
    presenter.get_form_details_response.assert_called_once_with(
        complete_form_details_dto=expected_complete_dto,
        form_metrics_dto=expected_metrics_dto)
def test_get_form_interactor_when_no_items_returns_empty_items_list(
        section_dtos, brand_dtos,
        ordered_item_dtos, form_mock_presenter_response):
    """A form with no items still resolves; item list stays empty."""
    # Arrange
    form_id, user_id = 1, 1
    empty_item_dtos = []
    form_storage = create_autospec(FormStorageInterface)
    order_item_storage = create_autospec(OrderItemStorageInterface)
    presenter = create_autospec(PresenterInterface)
    interactor = GetFormInteractor(
        form_storage=form_storage,
        order_item_storage=order_item_storage,
        presenter=presenter)
    form_details_dto = FormDetailsDto(
        form_id=1,
        form_name='SnacksForm',
        form_description='This is form',
        close_date='2020-05-17 20:22:46',
        section_dtos=section_dtos,
        item_dtos=empty_item_dtos,
        brand_dtos=brand_dtos)
    expected_complete_dto = CompleteFormDetailsDto(
        form_details_dto=form_details_dto,
        ordered_item_dtos=ordered_item_dtos)
    expected_metrics_dto = FormMetricsDto(total_cost=4500.0, total_items=15)
    form_storage.validate_form_id.return_value = True
    form_storage.get_form_details_dto.return_value = form_details_dto
    order_item_storage.get_user_ordered_item_dtos_of_form.return_value = \
        ordered_item_dtos
    presenter.get_form_details_response.return_value = \
        form_mock_presenter_response

    # Act
    actual_response = interactor.get_form_details(
        form_id=form_id, user_id=user_id)

    # Assert
    assert actual_response == form_mock_presenter_response
    form_storage.validate_form_id.assert_called_once_with(form_id=form_id)
    form_storage.get_form_details_dto.assert_called_once_with(
        form_id=form_id, user_id=user_id)
    order_item_storage.get_user_ordered_item_dtos_of_form \
        .assert_called_once_with(item_dtos=empty_item_dtos, user_id=user_id)
    presenter.get_form_details_response.assert_called_once_with(
        complete_form_details_dto=expected_complete_dto,
        form_metrics_dto=expected_metrics_dto)
def test_get_form_interactor_when_no_brands_returns_empty_brands_list(
        item_dtos, section_dtos,
        ordered_item_dtos, form_mock_presenter_response):
    """A form with no brands still resolves; brand list stays empty."""
    # Arrange
    form_id, user_id = 1, 1
    form_storage = create_autospec(FormStorageInterface)
    order_item_storage = create_autospec(OrderItemStorageInterface)
    presenter = create_autospec(PresenterInterface)
    interactor = GetFormInteractor(
        form_storage=form_storage,
        order_item_storage=order_item_storage,
        presenter=presenter)
    form_details_dto = FormDetailsDto(
        form_id=1,
        form_name='SnacksForm',
        form_description='This is form',
        close_date='2020-05-17 20:22:46',
        section_dtos=section_dtos,
        item_dtos=item_dtos,
        brand_dtos=[])
    expected_complete_dto = CompleteFormDetailsDto(
        form_details_dto=form_details_dto,
        ordered_item_dtos=ordered_item_dtos)
    expected_metrics_dto = FormMetricsDto(total_cost=4500.0, total_items=15)
    form_storage.validate_form_id.return_value = True
    form_storage.get_form_details_dto.return_value = form_details_dto
    order_item_storage.get_user_ordered_item_dtos_of_form.return_value = \
        ordered_item_dtos
    presenter.get_form_details_response.return_value = \
        form_mock_presenter_response

    # Act
    actual_response = interactor.get_form_details(
        form_id=form_id, user_id=user_id)

    # Assert
    assert actual_response == form_mock_presenter_response
    form_storage.validate_form_id.assert_called_once_with(form_id=form_id)
    form_storage.get_form_details_dto.assert_called_once_with(
        form_id=form_id, user_id=user_id)
    order_item_storage.get_user_ordered_item_dtos_of_form \
        .assert_called_once_with(item_dtos=item_dtos, user_id=user_id)
    presenter.get_form_details_response.assert_called_once_with(
        complete_form_details_dto=expected_complete_dto,
        form_metrics_dto=expected_metrics_dto)
def test_get_form_interactor_when_no_ordered_items_returns_empty_ordered_items_list(
        item_dtos, section_dtos,
        brand_dtos, form_mock_presenter_response):
    """With no ordered items the complete DTO carries an empty order list."""
    # Arrange
    form_id, user_id = 1, 1
    no_ordered_items = []
    form_storage = create_autospec(FormStorageInterface)
    order_item_storage = create_autospec(OrderItemStorageInterface)
    presenter = create_autospec(PresenterInterface)
    interactor = GetFormInteractor(
        form_storage=form_storage,
        order_item_storage=order_item_storage,
        presenter=presenter)
    form_details_dto = FormDetailsDto(
        form_id=1,
        form_name='SnacksForm',
        form_description='This is form',
        close_date='2020-05-17 20:22:46',
        section_dtos=section_dtos,
        item_dtos=item_dtos,
        brand_dtos=brand_dtos)
    expected_complete_dto = CompleteFormDetailsDto(
        form_details_dto=form_details_dto,
        ordered_item_dtos=no_ordered_items)
    expected_metrics_dto = FormMetricsDto(total_cost=0.0, total_items=0)
    form_storage.validate_form_id.return_value = True
    form_storage.get_form_details_dto.return_value = form_details_dto
    order_item_storage.get_user_ordered_item_dtos_of_form.return_value = \
        no_ordered_items
    presenter.get_form_details_response.return_value = \
        form_mock_presenter_response

    # Act
    actual_response = interactor.get_form_details(
        form_id=form_id, user_id=user_id)

    # Assert
    assert actual_response == form_mock_presenter_response
    form_storage.validate_form_id.assert_called_once_with(form_id=form_id)
    form_storage.get_form_details_dto.assert_called_once_with(
        form_id=form_id, user_id=user_id)
    order_item_storage.get_user_ordered_item_dtos_of_form \
        .assert_called_once_with(item_dtos=item_dtos, user_id=user_id)
    presenter.get_form_details_response.assert_called_once_with(
        complete_form_details_dto=expected_complete_dto,
        form_metrics_dto=expected_metrics_dto)
def test_get_form_interactor_when_no_ordered_items_returns_metrics_values_zero(
        item_dtos, section_dtos,
        brand_dtos, form_mock_presenter_response):
    """With no ordered items the form metrics compute to zero cost/count."""
    # Arrange
    form_id, user_id = 1, 1
    no_ordered_items = []
    form_storage = create_autospec(FormStorageInterface)
    order_item_storage = create_autospec(OrderItemStorageInterface)
    presenter = create_autospec(PresenterInterface)
    interactor = GetFormInteractor(
        form_storage=form_storage,
        order_item_storage=order_item_storage,
        presenter=presenter)
    form_details_dto = FormDetailsDto(
        form_id=1,
        form_name='SnacksForm',
        form_description='This is form',
        close_date='2020-05-17 20:22:46',
        section_dtos=section_dtos,
        item_dtos=item_dtos,
        brand_dtos=brand_dtos)
    expected_complete_dto = CompleteFormDetailsDto(
        form_details_dto=form_details_dto,
        ordered_item_dtos=no_ordered_items)
    expected_metrics_dto = FormMetricsDto(total_cost=0.0, total_items=0)
    form_storage.validate_form_id.return_value = True
    form_storage.get_form_details_dto.return_value = form_details_dto
    order_item_storage.get_user_ordered_item_dtos_of_form.return_value = \
        no_ordered_items
    presenter.get_form_details_response.return_value = \
        form_mock_presenter_response

    # Act
    actual_response = interactor.get_form_details(
        form_id=form_id, user_id=user_id)

    # Assert
    assert actual_response == form_mock_presenter_response
    form_storage.validate_form_id.assert_called_once_with(form_id=form_id)
    form_storage.get_form_details_dto.assert_called_once_with(
        form_id=form_id, user_id=user_id)
    order_item_storage.get_user_ordered_item_dtos_of_form \
        .assert_called_once_with(item_dtos=item_dtos, user_id=user_id)
    presenter.get_form_details_response.assert_called_once_with(
        complete_form_details_dto=expected_complete_dto,
        form_metrics_dto=expected_metrics_dto)
| 36.327913
| 84
| 0.766953
| 1,739
| 13,405
| 5.359402
| 0.051754
| 0.099142
| 0.054077
| 0.051502
| 0.944206
| 0.930579
| 0.915773
| 0.915773
| 0.915773
| 0.915773
| 0
| 0.01233
| 0.177173
| 13,405
| 368
| 85
| 36.42663
| 0.832638
| 0.007162
| 0
| 0.798013
| 0
| 0
| 0.018509
| 0
| 0
| 0
| 0
| 0
| 0.099338
| 1
| 0.019868
| false
| 0
| 0.02649
| 0
| 0.046358
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
999b9c18282b9bc65244eb14654afc97bc2b6dde
| 259
|
py
|
Python
|
03_GraphBasedPlanner/graph_ltpl/helper_funcs/src/__init__.py
|
f1tenth/ESweek2021_educationclassA3
|
7620a36d21c1824efba8a83f0671926bf8e028f3
|
[
"MIT"
] | 15
|
2021-10-09T13:48:49.000Z
|
2022-03-27T04:36:44.000Z
|
03_GraphBasedPlanner/graph_ltpl/helper_funcs/src/__init__.py
|
yinflight/ESweek2021_educationclassA3
|
7a32bacdb7f3154a773d28b6b6abffdaa154a526
|
[
"MIT"
] | 1
|
2021-11-27T01:47:25.000Z
|
2021-11-27T02:44:04.000Z
|
03_GraphBasedPlanner/graph_ltpl/helper_funcs/src/__init__.py
|
yinflight/ESweek2021_educationclassA3
|
7a32bacdb7f3154a773d28b6b6abffdaa154a526
|
[
"MIT"
] | 2
|
2021-11-03T19:32:55.000Z
|
2021-11-27T02:43:13.000Z
|
import graph_ltpl.helper_funcs.src.calc_vel_profile_follow
import graph_ltpl.helper_funcs.src.closest_path_index
import graph_ltpl.helper_funcs.src.get_s_coord
import graph_ltpl.helper_funcs.src.Logging
import graph_ltpl.helper_funcs.src.calc_brake_emergency
| 43.166667
| 58
| 0.903475
| 44
| 259
| 4.886364
| 0.431818
| 0.255814
| 0.348837
| 0.488372
| 0.711628
| 0.711628
| 0.306977
| 0
| 0
| 0
| 0
| 0
| 0.03861
| 259
| 5
| 59
| 51.8
| 0.863454
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
510a89e957ea22ea9acb65a1bf4c6b9c217069b3
| 194
|
py
|
Python
|
vnegmas/backend/api/nnegmas/__init__.py
|
YueNing/vnegmas
|
e95adc56ee9aab8d6cd6f28cce04383e199dc2b8
|
[
"MIT"
] | 3
|
2019-06-29T11:40:29.000Z
|
2019-09-07T02:15:09.000Z
|
vnegmas/backend/api/nnegmas/__init__.py
|
YueNing/vnegmas
|
e95adc56ee9aab8d6cd6f28cce04383e199dc2b8
|
[
"MIT"
] | null | null | null |
vnegmas/backend/api/nnegmas/__init__.py
|
YueNing/vnegmas
|
e95adc56ee9aab8d6cd6f28cce04383e199dc2b8
|
[
"MIT"
] | null | null | null |
from vnegmas.backend.src import nnegmas
from vnegmas.backend.src.nnegmas import negmas_draw
from vnegmas.backend.api.nnegmas.negmas_api import *
from vnegmas.backend.src.nnegmas import watch_fs
| 38.8
| 52
| 0.850515
| 30
| 194
| 5.4
| 0.366667
| 0.271605
| 0.444444
| 0.388889
| 0.419753
| 0.419753
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082474
| 194
| 4
| 53
| 48.5
| 0.910112
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
510b206e3ad1fd0485010cb46e4d3eb0f14f22a5
| 121
|
py
|
Python
|
tests/context.py
|
lzutao/classical_cipher
|
95612f03600a31d7fe325f637335a4c69b56cf6c
|
[
"MIT"
] | null | null | null |
tests/context.py
|
lzutao/classical_cipher
|
95612f03600a31d7fe325f637335a4c69b56cf6c
|
[
"MIT"
] | null | null | null |
tests/context.py
|
lzutao/classical_cipher
|
95612f03600a31d7fe325f637335a4c69b56cf6c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2
import sys
import os

# Make the project root (the parent of this tests/ directory) importable
# so tests can import the package under test without installation.
_project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(_project_root)
| 24.2
| 76
| 0.768595
| 21
| 121
| 4.238095
| 0.571429
| 0.202247
| 0.292135
| 0.337079
| 0.359551
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008772
| 0.057851
| 121
| 4
| 77
| 30.25
| 0.77193
| 0.173554
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
5ad32114346358b0ba04fcead845f3e40b3b8323
| 171
|
py
|
Python
|
apps/configuration/admin.py
|
Sunbird-Ed/evolve-api
|
371b39422839762e32401340456c13858cb8e1e9
|
[
"MIT"
] | 1
|
2019-02-27T15:26:11.000Z
|
2019-02-27T15:26:11.000Z
|
apps/configuration/admin.py
|
Sunbird-Ed/evolve-api
|
371b39422839762e32401340456c13858cb8e1e9
|
[
"MIT"
] | 9
|
2019-12-16T10:09:46.000Z
|
2022-03-11T23:42:12.000Z
|
apps/configuration/admin.py
|
Sunbird-Ed/evolve-api
|
371b39422839762e32401340456c13858cb8e1e9
|
[
"MIT"
] | null | null | null |
from django.contrib import admin

# Register your models here.
from .models import Book, State, Subject, Grade, Medium

# Expose all core content models in the Django admin with one call.
_registered_models = [Book, State, Subject, Grade, Medium]
admin.site.register(_registered_models)
| 24.428571
| 54
| 0.80117
| 25
| 171
| 5.48
| 0.6
| 0.131387
| 0.233577
| 0.306569
| 0.394161
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093567
| 171
| 7
| 54
| 24.428571
| 0.883871
| 0.152047
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5c8a4475ba59114657523865785d3cbc17c30a33
| 11,761
|
py
|
Python
|
test/selenium/smoke/Service_Offering.py
|
elShiaLabeouf/cloudstack
|
3c5580632425ded5a468c3cd82cd141e7410ef39
|
[
"Apache-2.0"
] | 1
|
2020-03-22T14:55:12.000Z
|
2020-03-22T14:55:12.000Z
|
test/selenium/smoke/Service_Offering.py
|
elShiaLabeouf/cloudstack
|
3c5580632425ded5a468c3cd82cd141e7410ef39
|
[
"Apache-2.0"
] | null | null | null |
test/selenium/smoke/Service_Offering.py
|
elShiaLabeouf/cloudstack
|
3c5580632425ded5a468c3cd82cd141e7410ef39
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys, os
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/'+'../lib'))
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
import unittest, time
import initialize
import Global_Locators
class Disk_offering_Add(unittest.TestCase):
    """Selenium smoke test: add a disk offering through the UI."""

    def setUp(self):
        # Reuse a single shared webdriver session across test classes.
        self.driver = initialize.getOrCreateWebdriver()
        self.verificationErrors = []

    def test_diskadd(self):
        driver = self.driver
        self.driver.implicitly_wait(200)
        # Make sure you are on Dashboard
        driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
        time.sleep(2)
        # Go to Service Offerings
        driver.find_element_by_xpath(Global_Locators.serviceOfferings_xpath).click()
        # Select Disk offering
        driver.find_element_by_xpath(Global_Locators.Offering_disk_xpath).click()
        # Add offering
        driver.find_element_by_xpath(Global_Locators.Offering_add_xpath).click()
        # Following have names.. so they do not have their global entries.
        driver.find_element_by_name("name").clear()
        driver.find_element_by_name("name").send_keys("Test Disk Name")
        driver.find_element_by_name("description").clear()
        driver.find_element_by_name("description").send_keys("Test Disk Description")
        driver.find_element_by_name("disksize").clear()
        driver.find_element_by_name("disksize").send_keys("1")
        driver.find_element_by_xpath("//button[@type='button']").click()
        time.sleep(20)
        # Verification will be if this offering shows up into table and we
        # can actually edit it.

    def is_element_present(self, how, what):
        """Return True iff the element locatable by (how, what) exists."""
        # BUGFIX: replaced Python-2-only "except E, e" syntax (a SyntaxError
        # on Python 3) with the version-agnostic form; the bound name was
        # never used.
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException:
            return False
        return True

    def tearDown(self):
        self.assertEqual([], self.verificationErrors)
class Disk_offering_Edit(unittest.TestCase):
    """Selenium smoke test: rename/redescribe the disk offering just added."""

    def setUp(self):
        # Reuse a single shared webdriver session across test classes.
        self.driver = initialize.getOrCreateWebdriver()
        self.verificationErrors = []

    def test_diskedit(self):
        driver = self.driver
        self.driver.implicitly_wait(200)
        # Make sure you are on Dashboard
        driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
        time.sleep(2)
        # Go to Service Offerings
        driver.find_element_by_xpath(Global_Locators.serviceOfferings_xpath).click()
        # Select Disk offering
        driver.find_element_by_xpath(Global_Locators.Offering_disk_xpath).click()
        # We will be searching for our disk offering into the table.
        # This returns a list of all Offerings in table.
        linkclass = driver.find_elements_by_xpath(Global_Locators.Offering_table_xpath)
        for link in linkclass:
            if link.text == "Test Disk Name":
                link.click()
        time.sleep(2)
        # Click Edit
        driver.find_element_by_css_selector(Global_Locators.Offering_edit_css).click()
        # Change name
        driver.find_element_by_name(Global_Locators.Offering_editname_name).clear()
        driver.find_element_by_name(Global_Locators.Offering_editname_name).send_keys("Test Name")
        # Change Description
        driver.find_element_by_name(Global_Locators.Offering_editdescription_name).clear()
        driver.find_element_by_name(Global_Locators.Offering_editdescription_name).send_keys("Test Description")
        # Click Done
        driver.find_element_by_css_selector(Global_Locators.Offering_editdone_css).click()
        time.sleep(10)

    def is_element_present(self, how, what):
        """Return True iff the element locatable by (how, what) exists."""
        # BUGFIX: replaced Python-2-only "except E, e" syntax (a SyntaxError
        # on Python 3) with the version-agnostic form; the bound name was
        # never used.
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException:
            return False
        return True

    def tearDown(self):
        self.assertEqual([], self.verificationErrors)
# Now we will find this offering and delete it!!
class Disk_offering_Delete(unittest.TestCase):
    """Selenium smoke test: find the renamed disk offering and delete it."""

    def setUp(self):
        # Reuse a single shared webdriver session across test classes.
        self.driver = initialize.getOrCreateWebdriver()
        self.verificationErrors = []

    def test_diskdelete(self):
        driver = self.driver
        self.driver.implicitly_wait(200)
        # Make sure you are on Dashboard
        driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
        time.sleep(2)
        # Go to Service Offerings
        driver.find_element_by_xpath(Global_Locators.serviceOfferings_xpath).click()
        # Select Disk offering
        driver.find_element_by_xpath(Global_Locators.Offering_disk_xpath).click()
        ## Action part
        # We will be searching for our disk offering into the table.
        # This returns a list of all Offerings in table.
        linkclass = driver.find_elements_by_xpath(Global_Locators.Offering_table_xpath)
        for link in linkclass:
            if link.text == "Test Name":
                link.click()
        time.sleep(2)
        # Click Delete
        driver.find_element_by_css_selector(Global_Locators.Offering_delete_css).click()
        time.sleep(2)
        driver.find_element_by_xpath(Global_Locators.yesconfirmation_xapth).click()
        time.sleep(20)

    def is_element_present(self, how, what):
        """Return True iff the element locatable by (how, what) exists."""
        # BUGFIX: replaced Python-2-only "except E, e" syntax (a SyntaxError
        # on Python 3) with the version-agnostic form; the bound name was
        # never used.
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException:
            return False
        return True

    def tearDown(self):
        self.assertEqual([], self.verificationErrors)
class Compute_offering_Add(unittest.TestCase):
    """Selenium smoke test: add a compute offering through the UI."""

    def setUp(self):
        # Reuse a single shared webdriver session across test classes.
        self.driver = initialize.getOrCreateWebdriver()
        self.verificationErrors = []

    def test_computeadd(self):
        driver = self.driver
        self.driver.implicitly_wait(200)
        # Make sure you are on Dashboard
        driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
        time.sleep(2)
        # Go to Service Offerings
        driver.find_element_by_xpath(Global_Locators.serviceOfferings_xpath).click()
        # Select Compute offering
        driver.find_element_by_xpath(Global_Locators.Offering_compute_xpath).click()
        ## Action part
        # Add offering
        driver.find_element_by_xpath(Global_Locators.Offering_add_xpath).click()
        # Following do not have Global locators
        driver.find_element_by_id("label_name").clear()
        driver.find_element_by_id("label_name").send_keys("Test Compute Name")
        driver.find_element_by_id("label_description").clear()
        driver.find_element_by_id("label_description").send_keys("Test Compute Description")
        driver.find_element_by_id("label_num_cpu_cores").clear()
        driver.find_element_by_id("label_num_cpu_cores").send_keys("2")
        driver.find_element_by_id("label_cpu_mhz").clear()
        driver.find_element_by_id("label_cpu_mhz").send_keys("2000")
        driver.find_element_by_id("label_memory_mb").clear()
        driver.find_element_by_id("label_memory_mb").send_keys("2048")
        driver.find_element_by_id("label_network_rate").clear()
        driver.find_element_by_id("label_network_rate").send_keys("10")
        driver.find_element_by_id("label_offer_ha").click()
        driver.find_element_by_xpath("//button[@type='button']").click()
        time.sleep(2)
        # Make sure you are on Dashboard
        driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
        time.sleep(30)

    def is_element_present(self, how, what):
        """Return True iff the element locatable by (how, what) exists."""
        # BUGFIX: replaced Python-2-only "except E, e" syntax (a SyntaxError
        # on Python 3) with the version-agnostic form; the bound name was
        # never used.
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException:
            return False
        return True

    def tearDown(self):
        self.assertEqual([], self.verificationErrors)
class Compute_offering_Edit(unittest.TestCase):
    """Selenium smoke test: rename/redescribe the compute offering added."""

    def setUp(self):
        # Reuse a single shared webdriver session across test classes.
        self.driver = initialize.getOrCreateWebdriver()
        self.verificationErrors = []

    def test_computeedit(self):
        driver = self.driver
        self.driver.implicitly_wait(200)
        # Make sure you are on Dashboard
        driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
        time.sleep(2)
        ## Action part
        # Go to Service Offerings
        driver.find_element_by_xpath(Global_Locators.serviceOfferings_xpath).click()
        # Select Compute offering
        driver.find_element_by_xpath(Global_Locators.Offering_compute_xpath).click()
        # We will be searching for our disk offering into the table.
        # This returns a list of all Offerings in table.
        linkclass = driver.find_elements_by_xpath(Global_Locators.Offering_table_xpath)
        for link in linkclass:
            if link.text == "Test Compute Name":
                link.click()
        time.sleep(2)
        # Click Edit
        driver.find_element_by_css_selector(Global_Locators.Offering_edit_css).click()
        # Change name
        driver.find_element_by_name(Global_Locators.Offering_editname_name).clear()
        driver.find_element_by_name(Global_Locators.Offering_editname_name).send_keys("Test Name")
        # Change Description
        driver.find_element_by_name(Global_Locators.Offering_editdescription_name).clear()
        driver.find_element_by_name(Global_Locators.Offering_editdescription_name).send_keys("Test Description")
        # Click Done
        driver.find_element_by_css_selector(Global_Locators.Offering_editdone_css).click()
        time.sleep(10)

    def is_element_present(self, how, what):
        """Return True iff the element locatable by (how, what) exists."""
        # BUGFIX: replaced Python-2-only "except E, e" syntax (a SyntaxError
        # on Python 3) with the version-agnostic form; the bound name was
        # never used.
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException:
            return False
        return True

    def tearDown(self):
        self.assertEqual([], self.verificationErrors)
class Compute_offering_Delete(unittest.TestCase):
    """Selenium smoke test: find the renamed compute offering and delete it."""

    def setUp(self):
        # Reuse a single shared webdriver session across test classes.
        self.driver = initialize.getOrCreateWebdriver()
        self.verificationErrors = []

    def test_computedelete(self):
        driver = self.driver
        self.driver.implicitly_wait(200)
        # Make sure you are on Dashboard
        driver.find_element_by_xpath(Global_Locators.dashboard_xpath).click()
        time.sleep(2)
        # Go to Service Offerings
        driver.find_element_by_xpath(Global_Locators.serviceOfferings_xpath).click()
        # Select Compute offering
        driver.find_element_by_xpath(Global_Locators.Offering_compute_xpath).click()
        ## Action part
        # We will be searching for our disk offering into the table.
        # This returns a list of all Offerings in table.
        linkclass = driver.find_elements_by_xpath(Global_Locators.Offering_table_xpath)
        for link in linkclass:
            if link.text == "Test Name":
                link.click()
        time.sleep(2)
        # Click Delete
        driver.find_element_by_css_selector(Global_Locators.Offering_deletecompute_css).click()
        driver.find_element_by_xpath(Global_Locators.yesconfirmation_xapth).click()
        time.sleep(20)

    def is_element_present(self, how, what):
        """Return True iff the element locatable by (how, what) exists."""
        # BUGFIX: replaced Python-2-only "except E, e" syntax (a SyntaxError
        # on Python 3) with the version-agnostic form; the bound name was
        # never used.
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException:
            return False
        return True

    def tearDown(self):
        self.assertEqual([], self.verificationErrors)
| 27.543326
| 133
| 0.718646
| 1,509
| 11,761
| 5.346587
| 0.139828
| 0.084284
| 0.134854
| 0.150719
| 0.835771
| 0.833664
| 0.806644
| 0.78235
| 0.756817
| 0.747645
| 0
| 0.006123
| 0.194541
| 11,761
| 426
| 134
| 27.607981
| 0.845561
| 0.177621
| 0
| 0.754098
| 0
| 0
| 0.050609
| 0.004998
| 0
| 0
| 0
| 0
| 0.032787
| 0
| null | null | 0
| 0.043716
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5cc17aac179e8f86aeefe515f5f4dd4eb5ddbe90
| 33,816
|
py
|
Python
|
test_pytest/test_sys/test_monitor/test_workflow.py
|
hrvojekeserica/hat-core
|
759def68620cf4f8c11e7bbbdbfd1e701dbafb09
|
[
"MIT"
] | null | null | null |
test_pytest/test_sys/test_monitor/test_workflow.py
|
hrvojekeserica/hat-core
|
759def68620cf4f8c11e7bbbdbfd1e701dbafb09
|
[
"MIT"
] | null | null | null |
test_pytest/test_sys/test_monitor/test_workflow.py
|
hrvojekeserica/hat-core
|
759def68620cf4f8c11e7bbbdbfd1e701dbafb09
|
[
"MIT"
] | null | null | null |
import asyncio
import pytest
from hat import util
from test_sys.test_monitor import common
@pytest.mark.timeout(10)
def test_server_listens(monitor_factory):
    """Server must bind its UI, monitor and master ports on all interfaces."""
    server_info = monitor_factory()
    open_connections = server_info.process.connections()
    expected_ports = {server_info.ui_port, server_info.monitor_port,
                      server_info.master_port}
    for expected_port in expected_ports:
        # Bind the loop variable as a default argument so each predicate
        # tests its own port.
        listening = util.first(
            open_connections,
            lambda c, p=expected_port: (c.laddr.ip == '0.0.0.0'
                                        and c.laddr.port == p))
        assert listening
@pytest.mark.timeout(10)
@pytest.mark.asyncio
async def test_single_component(monitor_factory, component_factory,
                                ui_client_factory):
    # Exercises the full lifecycle of one component against one monitor:
    # connect -> blessed -> ready -> not ready -> re-ranked, checking both
    # the component-side state and the UI-side view after each transition.
    server_info = monitor_factory()
    component_name = 'name'
    component_group = 'group'
    component = await component_factory(component_name, component_group,
                                        server_info)
    ui_client = await ui_client_factory(server_info)
    # Initial state: component has connected but has no local info yet.
    info, components = await component.next_state()
    assert info is None
    assert components == []
    # After registration: component appears with rank 1, a blessing token,
    # and no ready token; it is the only component in the cluster.
    info, components = await component.next_state()
    assert info.name == component_name
    assert info.group == component_group
    assert info.rank == 1
    assert info.blessing is not None
    assert info.ready is None
    assert components == [info]
    # The UI view must agree with the component's own info.
    ui_state = await ui_client.get_state()
    ui_info = common.find_ui_info(ui_state, info)
    assert ui_info
    assert ui_info['name'] == info.name
    assert ui_info['group'] == info.group
    # Component acknowledges its blessing: ready must mirror blessing.
    component.client.set_ready(info.blessing)
    info, components = await component.next_state()
    assert info.blessing is not None
    assert info.ready is not None
    assert info.ready == info.blessing
    assert components == [info]
    ui_state = await ui_client.get_state()
    ui_info = common.find_ui_info(ui_state, info)
    assert ui_info
    assert ui_info['blessing'] == info.blessing
    assert ui_info['ready'] == ui_info['blessing']
    # Component withdraws readiness: ready clears while blessing persists.
    component.client.set_ready(None)
    info, components = await component.next_state()
    assert info.ready is None
    assert info.blessing != info.ready
    assert components == [info]
    ui_state = await ui_client.get_state()
    ui_info = common.find_ui_info(ui_state, info)
    assert ui_info
    assert ui_info['blessing'] == info.blessing
    assert ui_info['ready'] is None
    # Rank changed via the UI propagates back to the component state.
    await ui_client.set_rank(info, 5)
    info, components = await component.next_state()
    assert info.rank == 5
    ui_state = await ui_client.get_state()
    ui_info = common.find_ui_info(ui_state, info)
    assert ui_info['rank'] == 5
@pytest.mark.timeout(10)
@pytest.mark.asyncio
async def test_bless_all(cluster_factory):
    # With the default BLESS_ALL algorithm every component in every group
    # receives a blessing token simultaneously.
    cluster = await cluster_factory({
        'g1': {'components': ['c1', 'c2']},
        'g2': {'components': ['c3']}})
    component1 = cluster.components['g1']['c1']
    component2 = cluster.components['g1']['c2']
    component3 = cluster.components['g2']['c3']
    ui_client = cluster.ui_client
    # Give the monitor time to converge before sampling state.
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    info3, components3 = await component3.newest_state()
    ui_state = await ui_client.get_state()
    # All components observe the same global component list, and each one
    # (across both groups) is blessed.
    assert components1 == components2 and components2 == components3
    assert info1.blessing is not None
    assert info2.blessing is not None
    assert info3.blessing is not None
    ui_info1 = common.find_ui_info(ui_state, info1)
    ui_info2 = common.find_ui_info(ui_state, info2)
    ui_info3 = common.find_ui_info(ui_state, info3)
    assert ui_info1 and ui_info2 and ui_info3
    assert ui_info1['blessing'] is not None
    assert ui_info2['blessing'] is not None
    assert ui_info3['blessing'] is not None
    # Each component acknowledges its blessing; ready must mirror blessing
    # on both the component side and the UI side.
    component1.client.set_ready(info1.blessing)
    component2.client.set_ready(info2.blessing)
    component3.client.set_ready(info3.blessing)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    info3, components3 = await component3.newest_state()
    ui_state = await ui_client.get_state()
    assert info1.blessing is not None
    assert info2.blessing is not None
    assert info3.blessing is not None
    assert info1.ready == info1.blessing
    assert info2.ready == info2.blessing
    assert info3.ready == info3.blessing
    ui_info1 = common.find_ui_info(ui_state, info1)
    ui_info2 = common.find_ui_info(ui_state, info2)
    ui_info3 = common.find_ui_info(ui_state, info3)
    assert ui_info1 and ui_info2 and ui_info3
    assert ui_info1['ready'] == ui_info1['blessing']
    assert ui_info2['ready'] == ui_info2['blessing']
    assert ui_info3['ready'] == ui_info3['blessing']
    # Re-ranking via the UI does not revoke blessings under BLESS_ALL;
    # the new ranks propagate to the components.
    await ui_client.set_rank(info1, 6)
    await ui_client.set_rank(info2, 2)
    await ui_client.set_rank(info3, 4)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    info3, components3 = await component3.newest_state()
    ui_state = await ui_client.get_state()
    assert info1.blessing is not None
    assert info2.blessing is not None
    assert info3.blessing is not None
    assert info1.rank == 6
    assert info2.rank == 2
    assert info3.rank == 4
@pytest.mark.timeout(10)
@pytest.mark.asyncio
async def test_bless_one(cluster_factory):
    """Default algorithm BLESS_ONE: exactly one group member is blessed at
    a time; a rank change revokes the blessing, and dropping readiness
    hands the blessing over to the other member.

    NOTE(review): relies on fixed 0.5 s sleeps for state propagation --
    assumes the cluster settles within that window.
    """
    cluster = await cluster_factory({
        'group': {
            'components': ['c1', 'c2']}
        }, default_algorithm='BLESS_ONE')
    component1 = cluster.components['group']['c1']
    component2 = cluster.components['group']['c2']
    ui_client = cluster.ui_client
    await asyncio.sleep(0.5)
    # Phase 1: only c1 is blessed; the UI state mirrors that.
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    ui_state = await ui_client.get_state()
    ui_info1 = common.find_ui_info(ui_state, info1)
    ui_info2 = common.find_ui_info(ui_state, info2)
    assert info1.blessing is not None
    assert info2.blessing is None
    assert ui_info1['blessing'] == info1.blessing
    assert ui_info2['blessing'] is None
    # Phase 2: c1 acknowledges its blessing -> ready equals blessing.
    component1.client.set_ready(info1.blessing)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    ui_state = await ui_client.get_state()
    ui_info1 = common.find_ui_info(ui_state, info1)
    assert info1.ready == info1.blessing
    assert ui_info1['ready'] == info1.ready
    assert ui_info1['blessing'] == ui_info1['ready']
    # Phase 3: raising c1's rank revokes its blessing; nobody is blessed
    # while c1 is still ready (the old blessing must be released first).
    await ui_client.set_rank(info1, 10)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    ui_state = await ui_client.get_state()
    ui_info1 = common.find_ui_info(ui_state, info1)
    ui_info2 = common.find_ui_info(ui_state, info2)
    assert info1.blessing is None
    assert info2.blessing is None
    assert ui_info1['blessing'] is None
    assert ui_info2['blessing'] is None
    # Phase 4: c1 drops readiness -> the blessing moves to c2.
    component1.client.set_ready(None)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    ui_state = await ui_client.get_state()
    ui_info1 = common.find_ui_info(ui_state, info1)
    ui_info2 = common.find_ui_info(ui_state, info2)
    assert info1.blessing is None
    assert info2.blessing is not None
    assert ui_info1['blessing'] is None
    assert ui_info2['blessing'] == info2.blessing
    # Phase 5: c2 acknowledges the blessing -> blessing and ready agree.
    component2.client.set_ready(info2.blessing)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    ui_state = await ui_client.get_state()
    ui_info1 = common.find_ui_info(ui_state, info1)
    ui_info2 = common.find_ui_info(ui_state, info2)
    assert info1.blessing is None
    assert info2.blessing is not None and info2.blessing == info2.ready
    assert ui_info1['blessing'] is None
    assert ui_info2['ready'] == info2.ready
@pytest.mark.timeout(10)
@pytest.mark.asyncio
async def test_bless_all_group_setting(cluster_factory):
    """Group-level 'BLESS_ALL' overrides the default BLESS_ONE: both group
    members get a blessing, readiness is acknowledged for both, and rank
    changes do not revoke any blessing."""
    cluster = await cluster_factory({
        'group': {
            'algorithm': 'BLESS_ALL',
            'components': ['c1', 'c2']}
        }, default_algorithm='BLESS_ONE')
    comps = [cluster.components['group'][name] for name in ('c1', 'c2')]
    ui = cluster.ui_client
    await asyncio.sleep(0.5)
    # Both components see the same component list and both are blessed.
    states = [await comp.newest_state() for comp in comps]
    ui_state = await ui.get_state()
    assert states[0][1] == states[1][1]
    for info, _ in states:
        assert info.blessing is not None
        ui_entry = common.find_ui_info(ui_state, info)
        assert ui_entry
        assert ui_entry['blessing']
    # Acknowledge the blessing on both components.
    for comp, (info, _) in zip(comps, states):
        comp.client.set_ready(info.blessing)
    await asyncio.sleep(0.5)
    states = [await comp.newest_state() for comp in comps]
    ui_state = await ui.get_state()
    for info, _ in states:
        assert info.ready == info.blessing
        ui_entry = common.find_ui_info(ui_state, info)
        assert ui_entry
        assert ui_entry['ready'] == ui_entry['blessing']
    # Under BLESS_ALL a rank change must not revoke any blessing.
    ranks = (6, 2)
    for (info, _), rank in zip(states, ranks):
        await ui.set_rank(info, rank)
    await asyncio.sleep(0.5)
    states = [await comp.newest_state() for comp in comps]
    ui_state = await ui.get_state()
    for (info, _), rank in zip(states, ranks):
        assert info.blessing is not None
        assert info.rank == rank
@pytest.mark.timeout(10)
@pytest.mark.asyncio
async def test_bless_one_group_setting(cluster_factory):
    """Group-level 'BLESS_ONE' setting: same handover behavior as the
    default-algorithm variant, exercised on a 'redundant' group with a
    primary and a secondary component.

    NOTE(review): relies on fixed 0.5 s sleeps for state propagation.
    """
    cluster = await cluster_factory(group_conf={
        'redundant': {
            'algorithm': 'BLESS_ONE',
            'components': ['primary', 'secondary']}})
    primary = cluster.components['redundant']['primary']
    secondary = cluster.components['redundant']['secondary']
    ui_client = cluster.ui_client
    await asyncio.sleep(0.5)
    # Phase 1: only the primary is blessed.
    info1, components1 = await primary.newest_state()
    info2, components2 = await secondary.newest_state()
    ui_state = await ui_client.get_state()
    ui_info1 = common.find_ui_info(ui_state, info1)
    ui_info2 = common.find_ui_info(ui_state, info2)
    assert info1.blessing is not None
    assert info2.blessing is None
    assert ui_info1['blessing'] == info1.blessing
    assert ui_info2['blessing'] is None
    # Phase 2: primary acknowledges the blessing.
    primary.client.set_ready(info1.blessing)
    await asyncio.sleep(0.5)
    info1, components1 = await primary.newest_state()
    info2, components2 = await secondary.newest_state()
    ui_state = await ui_client.get_state()
    ui_info1 = common.find_ui_info(ui_state, info1)
    assert info1.ready == info1.blessing
    assert ui_info1['ready'] == info1.ready
    assert ui_info1['blessing'] == ui_info1['ready']
    # Phase 3: a rank change revokes the primary's blessing; while it is
    # still ready, no component is blessed.
    await ui_client.set_rank(info1, 10)
    await asyncio.sleep(0.5)
    info1, components1 = await primary.newest_state()
    info2, components2 = await secondary.newest_state()
    ui_state = await ui_client.get_state()
    ui_info1 = common.find_ui_info(ui_state, info1)
    ui_info2 = common.find_ui_info(ui_state, info2)
    assert info1.blessing is None
    assert info2.blessing is None
    assert ui_info1['blessing'] is None
    assert ui_info2['blessing'] is None
    # Phase 4: primary drops readiness -> blessing moves to the secondary.
    primary.client.set_ready(None)
    await asyncio.sleep(0.5)
    info1, components1 = await primary.newest_state()
    info2, components2 = await secondary.newest_state()
    ui_state = await ui_client.get_state()
    ui_info1 = common.find_ui_info(ui_state, info1)
    ui_info2 = common.find_ui_info(ui_state, info2)
    assert info1.blessing is None
    assert info2.blessing is not None
    assert ui_info1['blessing'] is None
    assert ui_info2['blessing'] == info2.blessing
    # Phase 5: secondary acknowledges -> blessing and ready agree.
    secondary.client.set_ready(info2.blessing)
    await asyncio.sleep(0.5)
    info1, components1 = await primary.newest_state()
    info2, components2 = await secondary.newest_state()
    ui_state = await ui_client.get_state()
    ui_info1 = common.find_ui_info(ui_state, info1)
    ui_info2 = common.find_ui_info(ui_state, info2)
    assert info1.blessing is None
    assert info2.blessing is not None and info2.blessing == info2.ready
    assert ui_info1['blessing'] is None
    assert ui_info2['ready'] == info2.ready
@pytest.mark.timeout(10)
def test_master_slave(monitor_factory):
    """A monitor started with a parent opens a TCP connection to that
    parent's master port on localhost."""
    parent = monitor_factory()
    child = monitor_factory(parent_infos=[parent])

    def is_link_to_parent(conn):
        # Connections without a remote address cannot be the parent link.
        if not conn.raddr:
            return False
        return (conn.raddr.port == parent.master_port
                and conn.raddr.ip == '127.0.0.1')

    assert util.first(child.process.connections(), is_link_to_parent)
@pytest.mark.timeout(10)
@pytest.mark.asyncio
async def test_peers_bless_all(cluster_factory):
    """Default BLESS_ALL across a master/slave cluster pair: components of
    the same group living on different clusters see one merged state, and
    both receive blessings.

    NOTE(review): relies on fixed 0.5 s sleeps for state propagation.
    """
    group_name = 'group'
    c1_name = 'c1'
    c2_name = 'c2'
    master_cluster = await cluster_factory({
        group_name: {'components': [c1_name]}})
    # The slave cluster attaches to the master via parent_infos.
    slave_cluster = await cluster_factory({
        group_name: {'components': [c2_name]}},
        parent_infos=[master_cluster.server_info])
    component1 = master_cluster.components[group_name][c1_name]
    component2 = slave_cluster.components[group_name][c2_name]
    master_ui_client = master_cluster.ui_client
    slave_ui_client = slave_cluster.ui_client
    await asyncio.sleep(0.5)
    # Phase 1: both components are blessed; master and slave UI agree.
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    ui_state_master = await master_ui_client.get_state()
    ui_state_slave = await slave_ui_client.get_state()
    assert components1 == components2
    assert info1.blessing is not None
    assert info2.blessing is not None
    assert ui_state_master['components'] == ui_state_slave['components']
    ui_info1 = common.find_ui_info(ui_state_master, info1)
    ui_info2 = common.find_ui_info(ui_state_master, info2)
    assert ui_info1 and ui_info2
    assert ui_info1['blessing'] is not None
    assert ui_info2['blessing'] is not None
    # Phase 2: both acknowledge -> ready equals blessing.
    component1.client.set_ready(info1.blessing)
    component2.client.set_ready(info2.blessing)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    ui_state_master = await master_ui_client.get_state()
    assert info1.ready == info1.blessing
    assert info2.ready == info2.blessing
    ui_info1 = common.find_ui_info(ui_state_master, info1)
    ui_info2 = common.find_ui_info(ui_state_master, info2)
    assert ui_info1 and ui_info2
    assert ui_info1['ready'] == ui_info1['blessing']
    assert ui_info2['ready'] == ui_info2['blessing']
    # Phase 3: rank changes propagate but do not revoke blessings.
    await master_ui_client.set_rank(info1, 6)
    await master_ui_client.set_rank(info2, 2)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    ui_state_master = await master_ui_client.get_state()
    assert info1.blessing is not None and info2.blessing is not None
    assert info1.rank == 6
    assert info2.rank == 2
@pytest.mark.timeout(10)
@pytest.mark.asyncio
async def test_peers_bless_one(cluster_factory):
    """Default BLESS_ONE on the master cluster, observed across a
    master/slave pair: only one component is blessed at a time, a rank
    change revokes the blessing, and dropping readiness hands the
    blessing over to the component on the slave cluster.

    NOTE(review): relies on fixed 0.5 s sleeps for state propagation.
    """
    group_name = 'group'
    c1_name = 'c1'
    c2_name = 'c2'
    master_cluster = await cluster_factory({
        group_name: {'components': [c1_name]}},
        default_algorithm='BLESS_ONE')
    slave_cluster = await cluster_factory({
        group_name: {'components': [c2_name]}},
        parent_infos=[master_cluster.server_info])
    component1 = master_cluster.components[group_name][c1_name]
    component2 = slave_cluster.components[group_name][c2_name]
    master_ui_client = master_cluster.ui_client
    slave_ui_client = slave_cluster.ui_client
    await asyncio.sleep(0.5)
    # Phase 1: exactly one component (c1 on the master) is blessed.
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    master_ui_state = await master_ui_client.get_state()
    slave_ui_state = await slave_ui_client.get_state()
    assert master_ui_state['components'] == slave_ui_state['components']
    ui_info1 = common.find_ui_info(master_ui_state, info1)
    ui_info2 = common.find_ui_info(master_ui_state, info2)
    assert info1.blessing is not None
    assert info2.blessing is None
    assert ui_info1['blessing'] == info1.blessing
    assert ui_info2['blessing'] is None
    # Phase 2: c1 acknowledges its blessing.
    component1.client.set_ready(info1.blessing)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    master_ui_state = await master_ui_client.get_state()
    slave_ui_state = await slave_ui_client.get_state()
    assert master_ui_state['components'] == slave_ui_state['components']
    ui_info1 = common.find_ui_info(master_ui_state, info1)
    assert info1.ready == info1.blessing
    assert ui_info1['ready'] == info1.ready
    assert ui_info1['blessing'] == ui_info1['ready']
    # Phase 3: a rank change revokes c1's blessing; no one is blessed
    # while c1 is still ready.
    await master_ui_client.set_rank(info1, 10)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    master_ui_state = await master_ui_client.get_state()
    slave_ui_state = await slave_ui_client.get_state()
    assert master_ui_state['components'] == slave_ui_state['components']
    ui_info1 = common.find_ui_info(master_ui_state, info1)
    ui_info2 = common.find_ui_info(master_ui_state, info2)
    assert info1.blessing is None
    assert info2.blessing is None
    assert ui_info1['blessing'] is None
    assert ui_info2['blessing'] is None
    # Phase 4: c1 drops readiness -> blessing moves to c2 on the slave.
    component1.client.set_ready(None)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    master_ui_state = await master_ui_client.get_state()
    slave_ui_state = await slave_ui_client.get_state()
    assert master_ui_state['components'] == slave_ui_state['components']
    ui_info1 = common.find_ui_info(master_ui_state, info1)
    ui_info2 = common.find_ui_info(master_ui_state, info2)
    assert info1.blessing is None
    assert info2.blessing is not None
    assert ui_info1['blessing'] is None
    assert ui_info2['blessing'] == info2.blessing
    # Phase 5: c2 acknowledges -> blessing and ready agree.
    component2.client.set_ready(info2.blessing)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    master_ui_state = await master_ui_client.get_state()
    slave_ui_state = await slave_ui_client.get_state()
    assert master_ui_state['components'] == slave_ui_state['components']
    ui_info1 = common.find_ui_info(master_ui_state, info1)
    ui_info2 = common.find_ui_info(master_ui_state, info2)
    assert info1.blessing is None
    assert info2.blessing is not None and info2.blessing == info2.ready
    assert ui_info1['blessing'] is None
    assert ui_info2['ready'] == info2.ready
@pytest.mark.timeout(10)
@pytest.mark.asyncio
async def test_peers_bless_all_group(cluster_factory):
    """Group-level 'BLESS_ALL' on the master overrides its BLESS_ONE
    default, across a master/slave pair: both components are blessed and
    rank changes do not revoke blessings.

    NOTE(review): relies on fixed 0.5 s sleeps for state propagation.
    """
    group_name = 'group'
    c1_name = 'c1'
    c2_name = 'c2'
    master_cluster = await cluster_factory({
        group_name: {
            'components': [c1_name],
            'algorithm': 'BLESS_ALL'}},
        default_algorithm='BLESS_ONE')
    slave_cluster = await cluster_factory({
        group_name: {'components': [c2_name]}},
        parent_infos=[master_cluster.server_info])
    component1 = master_cluster.components[group_name][c1_name]
    component2 = slave_cluster.components[group_name][c2_name]
    master_ui_client = master_cluster.ui_client
    slave_ui_client = slave_cluster.ui_client
    await asyncio.sleep(0.5)
    # Phase 1: both components are blessed; master and slave UI agree.
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    master_ui_state = await master_ui_client.get_state()
    slave_ui_state = await slave_ui_client.get_state()
    assert components1 == components2
    assert info1.blessing is not None
    assert info2.blessing is not None
    assert master_ui_state['components'] == slave_ui_state['components']
    ui_info1 = common.find_ui_info(master_ui_state, info1)
    ui_info2 = common.find_ui_info(master_ui_state, info2)
    assert ui_info1 and ui_info2
    assert ui_info1['blessing']
    assert ui_info2['blessing']
    # Phase 2: both acknowledge -> ready equals blessing.
    component1.client.set_ready(info1.blessing)
    component2.client.set_ready(info2.blessing)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    master_ui_state = await master_ui_client.get_state()
    slave_ui_state = await slave_ui_client.get_state()
    assert info1.ready == info1.blessing
    assert info2.ready == info2.blessing
    assert master_ui_state['components'] == slave_ui_state['components']
    ui_info1 = common.find_ui_info(master_ui_state, info1)
    ui_info2 = common.find_ui_info(master_ui_state, info2)
    assert ui_info1 and ui_info2
    assert ui_info1['ready'] == ui_info1['blessing']
    assert ui_info2['ready'] == ui_info2['blessing']
    # Phase 3: rank changes propagate but do not revoke blessings.
    await master_ui_client.set_rank(info1, 6)
    await master_ui_client.set_rank(info2, 2)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    master_ui_state = await master_ui_client.get_state()
    assert info1.blessing is not None and info2.blessing is not None
    assert info1.rank == 6
    assert info2.rank == 2
@pytest.mark.timeout(10)
@pytest.mark.asyncio
async def test_peers_bless_one_group(cluster_factory):
    """Group-level 'BLESS_ONE' on the master cluster, observed across a
    master/slave pair: only one component is blessed at a time, a rank
    change revokes the blessing, and dropping readiness hands the
    blessing over to the peer component on the slave cluster.

    NOTE(review): relies on fixed 0.5 s sleeps for state propagation.
    """
    group_name = 'group'
    c1_name = 'c1'
    c2_name = 'c2'
    master_cluster = await cluster_factory({
        group_name: {
            'components': [c1_name],
            'algorithm': 'BLESS_ONE'}})
    slave_cluster = await cluster_factory({
        group_name: {'components': [c2_name]}},
        parent_infos=[master_cluster.server_info])
    component1 = master_cluster.components[group_name][c1_name]
    component2 = slave_cluster.components[group_name][c2_name]
    master_ui_client = master_cluster.ui_client
    slave_ui_client = slave_cluster.ui_client
    await asyncio.sleep(0.5)
    # Phase 1: exactly one component (c1 on the master) is blessed.
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    master_ui_state = await master_ui_client.get_state()
    slave_ui_state = await slave_ui_client.get_state()
    assert master_ui_state['components'] == slave_ui_state['components']
    ui_info1 = common.find_ui_info(master_ui_state, info1)
    ui_info2 = common.find_ui_info(master_ui_state, info2)
    assert info1.blessing is not None
    assert info2.blessing is None
    assert ui_info1['blessing'] == info1.blessing
    assert ui_info2['blessing'] is None
    # Phase 2: c1 acknowledges its blessing.
    component1.client.set_ready(info1.blessing)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    master_ui_state = await master_ui_client.get_state()
    slave_ui_state = await slave_ui_client.get_state()
    assert master_ui_state['components'] == slave_ui_state['components']
    ui_info1 = common.find_ui_info(master_ui_state, info1)
    assert info1.ready == info1.blessing
    assert ui_info1['ready'] == info1.ready
    assert ui_info1['blessing'] == ui_info1['ready']
    # Phase 3: a rank change revokes c1's blessing; no one is blessed
    # while c1 is still ready.
    await master_ui_client.set_rank(info1, 10)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    master_ui_state = await master_ui_client.get_state()
    slave_ui_state = await slave_ui_client.get_state()
    assert master_ui_state['components'] == slave_ui_state['components']
    ui_info1 = common.find_ui_info(master_ui_state, info1)
    ui_info2 = common.find_ui_info(master_ui_state, info2)
    assert info1.blessing is None
    assert info2.blessing is None
    assert ui_info1['blessing'] is None
    assert ui_info2['blessing'] is None
    # Phase 4: c1 drops readiness -> blessing moves to c2 on the slave.
    component1.client.set_ready(None)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    master_ui_state = await master_ui_client.get_state()
    slave_ui_state = await slave_ui_client.get_state()
    assert master_ui_state['components'] == slave_ui_state['components']
    ui_info1 = common.find_ui_info(master_ui_state, info1)
    ui_info2 = common.find_ui_info(master_ui_state, info2)
    assert info1.blessing is None
    assert info2.blessing is not None
    assert ui_info1['blessing'] is None
    assert ui_info2['blessing'] == info2.blessing
    # Phase 5: c2 acknowledges -> blessing and ready agree.
    component2.client.set_ready(info2.blessing)
    await asyncio.sleep(0.5)
    info1, components1 = await component1.newest_state()
    info2, components2 = await component2.newest_state()
    master_ui_state = await master_ui_client.get_state()
    slave_ui_state = await slave_ui_client.get_state()
    assert master_ui_state['components'] == slave_ui_state['components']
    ui_info1 = common.find_ui_info(master_ui_state, info1)
    ui_info2 = common.find_ui_info(master_ui_state, info2)
    assert info1.blessing is None
    # Fix: mirror the final-phase assertions of test_peers_bless_one --
    # previously info2's blessing/ready agreement was never checked and
    # ui_info1 was computed but unused.
    assert info2.blessing is not None and info2.blessing == info2.ready
    assert ui_info1['blessing'] is None
    assert ui_info2['ready'] == info2.ready
@pytest.mark.timeout(10)
@pytest.mark.asyncio
@pytest.mark.parametrize('cluster_confs', [
    [{
        'groups': {
            'target_group': {
                'components': ['c1', 'c2', 'c3']}},
        'default_algorithm': 'BLESS_ALL'}],
    [{
        'groups': {
            'target_group': {
                'components': ['c1', 'c2', 'c3'],
                'algorithm': 'BLESS_ALL'}},
        'default_algorithm': 'BLESS_ONE'}],
    [{
        'groups': {
            'target_group': {
                'components': ['c1', 'c2']}},
        'default_algorithm': 'BLESS_ALL'},
     {
        'groups': {
            'target_group': {
                'components': ['c3', 'c4']}},
        'default_algorithm': 'BLESS_ALL'}],
    [{
        'groups': {
            'target_group': {
                'components': ['c1', 'c2'],
                'algorithm': 'BLESS_ALL'}},
        'default_algorithm': 'BLESS_ONE'},
     {
        'groups': {
            'target_group': {
                'components': ['c3', 'c4']}},
        'default_algorithm': 'BLESS_ALL'}],
    [{
        'groups': {
            'target_group': {
                'components': ['c1', 'c2'],
                'algorithm': 'BLESS_ALL'}},
        'default_algorithm': 'BLESS_ONE'},
     {
        'groups': {
            'target_group': {
                'components': ['c3', 'c4'],
                'algorithm': 'BLESS_ONE'}},
        'default_algorithm': 'BLESS_ONE'}],
    ])
async def test_bless_all_behavior(cluster_factory, cluster_confs):
    """BLESS_ALL semantics across several cluster topologies: every
    component of 'target_group' is blessed, and acknowledging the
    blessing makes ready equal blessing everywhere.

    Each entry of ``cluster_confs`` describes one cluster; clusters are
    chained so each new one uses all previous ones as parents.

    NOTE(review): the last parametrize case mixes BLESS_ALL and BLESS_ONE
    group settings on different clusters -- presumably the master
    cluster's group setting wins; verify against the monitor spec.
    """
    clusters = []
    target_components = []
    ui_clients = []
    for conf in cluster_confs:
        # The comprehension's 'cluster' is scoped to the comprehension,
        # so it does not clobber the loop variable assigned below.
        cluster = await cluster_factory(
            group_conf=conf['groups'],
            default_algorithm=conf['default_algorithm'],
            parent_infos=[cluster.server_info for cluster in clusters],
            default_rank=1)
        target_components.extend(
            cluster.components['target_group'].values())
        clusters.append(cluster)
        ui_clients.append(cluster.ui_client)
    await asyncio.sleep(0.5)
    # All clusters must report the same merged component list.
    ui_states = [await ui_client.get_state() for ui_client in ui_clients]
    assert all([state['components'] == ui_states[0]['components']
                for state in ui_states])
    master_ui_state = ui_states[0]
    # Phase 1: every target component is blessed; acknowledge each.
    for component in target_components:
        info, components = await component.newest_state()
        ui_info = common.find_ui_info(master_ui_state, info)
        assert info.blessing is not None
        assert ui_info['blessing'] is not None
        assert info.blessing == ui_info['blessing']
        component.client.set_ready(info.blessing)
    await asyncio.sleep(0.5)
    ui_states = [await ui_client.get_state() for ui_client in ui_clients]
    assert all([state['components'] == ui_states[0]['components']
                for state in ui_states])
    master_ui_state = ui_states[0]
    # Phase 2: blessing and ready agree for every component, in both the
    # component-reported state and the UI state.
    for component in target_components:
        info, components = await component.newest_state()
        ui_info = common.find_ui_info(master_ui_state, info)
        assert info.blessing is not None
        assert info.ready is not None
        assert info.blessing == info.ready
        assert ui_info['blessing'] is not None
        assert ui_info['ready'] is not None
        assert ui_info['blessing'] == ui_info['ready']
        assert ui_info['blessing'] == info.ready
@pytest.mark.timeout(10)
@pytest.mark.asyncio
@pytest.mark.parametrize('cluster_confs', [
    [{
        'groups': {
            'target_group': {
                'components': ['c1', 'c2', 'c3']}},
        'default_algorithm': 'BLESS_ONE'}],
    [{
        'groups': {
            'target_group': {
                'components': ['c1', 'c2', 'c3'],
                'algorithm': 'BLESS_ONE'}},
        'default_algorithm': 'BLESS_ALL'}],
    [{
        'groups': {
            'target_group': {
                'components': ['c1', 'c2']}},
        'default_algorithm': 'BLESS_ONE'},
     {
        'groups': {
            'target_group': {
                'components': ['c3', 'c4']}},
        'default_algorithm': 'BLESS_ONE'}],
    [{
        'groups': {
            'target_group': {
                'components': ['c1', 'c2'],
                'algorithm': 'BLESS_ONE'}},
        'default_algorithm': 'BLESS_ONE'},
     {
        'groups': {
            'target_group': {
                'components': ['c3', 'c4']}},
        'default_algorithm': 'BLESS_ONE'}],
    [{
        'groups': {
            'target_group': {
                'components': ['c1', 'c2'],
                'algorithm': 'BLESS_ONE'}},
        'default_algorithm': 'BLESS_ALL'},
     {
        'groups': {
            'target_group': {
                'components': ['c3', 'c4'],
                'algorithm': 'BLESS_ALL'}},
        'default_algorithm': 'BLESS_ALL'}],
    ])
async def test_bless_one_behavior(cluster_factory, cluster_confs):
    """BLESS_ONE semantics across several cluster topologies: exactly one
    component of 'target_group' is blessed at a time; raising the
    blessed component's rank revokes its blessing, and after it drops
    readiness the blessing moves to a different component.

    NOTE(review): relies on fixed 0.5 s sleeps for state propagation.
    """
    clusters = []
    target_components = []
    ui_clients = []
    for conf in cluster_confs:
        # Chain clusters: each new one parents on all previous ones.
        cluster = await cluster_factory(
            group_conf=conf['groups'],
            default_algorithm=conf['default_algorithm'],
            parent_infos=[cluster.server_info for cluster in clusters],
            default_rank=1)
        target_components.extend(
            cluster.components['target_group'].values())
        clusters.append(cluster)
        ui_clients.append(cluster.ui_client)
    await asyncio.sleep(0.5)
    ui_states = [await ui_client.get_state() for ui_client in ui_clients]
    assert all([state['components'] == ui_states[0]['components']
                for state in ui_states])
    ui_state = ui_states[0]
    # Phase 1: exactly one component holds a blessing; each component
    # acknowledges whatever it got (blessing or None).
    blessings_count = 0
    for component in target_components:
        info, components = await component.newest_state()
        ui_info = common.find_ui_info(ui_state, info)
        if info.blessing is not None:
            assert ui_info['blessing'] == info.blessing
            blessings_count += 1
        else:
            assert ui_info['blessing'] is None
        component.client.set_ready(info.blessing)
    assert blessings_count == 1
    await asyncio.sleep(0.5)
    ui_states = [await ui_client.get_state() for ui_client in ui_clients]
    assert all([state['components'] == ui_states[0]['components']
                for state in ui_states])
    ui_state = ui_states[0]
    # Phase 2: the blessed component is ready; raise its rank to force a
    # revocation, remembering which one it was.
    blessed_index = None
    for (i, component) in enumerate(target_components):
        info, components = await component.newest_state()
        ui_info = common.find_ui_info(ui_state, info)
        if info.blessing is not None:
            assert info.blessing == info.ready
            assert ui_info['ready'] == info.ready
            assert ui_info['blessing'] == ui_info['ready']
            await ui_clients[0].set_rank(info, 5)
            blessed_index = i
        else:
            assert info.ready is None
            assert ui_info['ready'] is None
            assert ui_info['blessing'] is None
    await asyncio.sleep(0.5)
    ui_states = [await ui_client.get_state() for ui_client in ui_clients]
    assert all([state['components'] == ui_states[0]['components']
                for state in ui_states])
    ui_state = ui_states[0]
    # Phase 3: nobody is blessed while the old holder is still ready;
    # set_ready(info.blessing) here passes None, i.e. drops readiness.
    for (i, component) in enumerate(target_components):
        info, components = await component.newest_state()
        ui_info = common.find_ui_info(ui_state, info)
        assert info.blessing is None
        assert ui_info['blessing'] is None
        component.client.set_ready(info.blessing)
    await asyncio.sleep(0.5)
    ui_states = [await ui_client.get_state() for ui_client in ui_clients]
    assert all([state['components'] == ui_states[0]['components']
                for state in ui_states])
    ui_state = ui_states[0]
    # Phase 4: exactly one component is blessed again, and it is a
    # different one than before the rank change.
    blessings_count = 0
    for (i, component) in enumerate(target_components):
        info, components = await component.newest_state()
        ui_info = common.find_ui_info(ui_state, info)
        if info.blessing is not None:
            assert i != blessed_index
            assert ui_info['blessing'] == info.blessing
            blessings_count += 1
            component.client.set_ready(info.blessing)
    assert blessings_count == 1
| 35.55836
| 76
| 0.681186
| 4,409
| 33,816
| 4.957814
| 0.025403
| 0.046434
| 0.035134
| 0.046846
| 0.941443
| 0.932156
| 0.923098
| 0.91006
| 0.893957
| 0.883801
| 0
| 0.030772
| 0.211971
| 33,816
| 950
| 77
| 35.595789
| 0.789515
| 0
| 0
| 0.884021
| 0
| 0
| 0.07804
| 0
| 0
| 0
| 0
| 0
| 0.271907
| 1
| 0.002577
| false
| 0
| 0.005155
| 0
| 0.007732
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7a892993e34028538ff0037a127c5626a3d6b6ee
| 24,230
|
py
|
Python
|
webapp/tests/forms/steps/lotse/test_merkzeichen.py
|
digitalservice4germany/steuerlotse
|
ef3e094e4d7d4768431a50ac4be60672cd03221d
|
[
"MIT"
] | 20
|
2021-07-02T07:49:08.000Z
|
2022-03-18T22:26:10.000Z
|
webapp/tests/forms/steps/lotse/test_merkzeichen.py
|
digitalservice4germany/steuerlotse
|
ef3e094e4d7d4768431a50ac4be60672cd03221d
|
[
"MIT"
] | 555
|
2021-06-28T15:35:15.000Z
|
2022-03-31T11:51:55.000Z
|
webapp/tests/forms/steps/lotse/test_merkzeichen.py
|
digitalservice4germany/steuerlotse
|
ef3e094e4d7d4768431a50ac4be60672cd03221d
|
[
"MIT"
] | 1
|
2021-07-04T20:34:12.000Z
|
2021-07-04T20:34:12.000Z
|
import datetime
import pytest
from flask.sessions import SecureCookieSession
from flask_babel import ngettext, lazy_gettext as _l
from pydantic import ValidationError
from werkzeug.datastructures import ImmutableMultiDict, MultiDict
from app.forms.flows.lotse_step_chooser import LotseStepChooser, _LOTSE_DATA_KEY
from app.forms.steps.lotse.merkzeichen import StepMerkzeichenPersonA, StepMerkzeichenPersonB, \
HasMerkzeichenPersonAPrecondition, HasMerkzeichenPersonBPrecondition
from tests.utils import create_session_form_data
_POSITIVE_CHECKBOX_VALUE = 'on' # The value in standard checkboxes is 'on'.
def new_merkzeichen_person_a_step(form_data):
    """Build the Merkzeichen step for person A with the given form data."""
    immutable_data = ImmutableMultiDict(form_data)
    chooser = LotseStepChooser()
    return chooser.get_correct_step(
        StepMerkzeichenPersonA.name, True, immutable_data)
@pytest.fixture
def test_request_context_with_person_a_disability(new_test_request_context):
    # Request context whose stored session data marks person A as having a
    # disability -- the precondition for reaching the Merkzeichen step.
    with new_test_request_context(stored_data={'person_a_has_disability': 'yes'}) as req:
        yield req
@pytest.mark.usefixtures('test_request_context_with_person_a_disability')
class TestStepMerkzeichenPersonAValidation:
@pytest.fixture()
def valid_form_data(self):
return {'person_a_has_pflegegrad': 'no'}
def test_if_has_pflegegrad_not_given_then_fail_validation(self):
data = MultiDict({})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is False
def test_if_has_pflegegrad_given_then_succ_validation(self):
data = MultiDict({'person_a_has_pflegegrad': 'no'})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is True
def test_if_disability_degree_has_allowed_value_then_succ_validation(self):
for allowed_value in [20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90, 95, 100]:
data = MultiDict({'person_a_has_pflegegrad': 'no',
'person_a_disability_degree': allowed_value})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is True
def test_if_disability_degree_has_not_allowed_value_then_fail_validation(self):
for not_allowed_value in [21, 105]:
data = MultiDict({'person_a_has_pflegegrad': 'no',
'person_a_disability_degree': not_allowed_value})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is False
def test_if_disability_degree_zero_and_has_no_merkzeichen_g_or_ag_then_succ_validation(self):
data = MultiDict({'person_a_has_pflegegrad': 'no',
'person_a_disability_degree': 0,
'person_a_has_merkzeichen_g': False,
'person_a_has_merkzeichen_ag': False})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is True
def test_if_disability_degree_below_20_and_not_zero_and_has_no_merkzeichen_g_or_ag_then_fail_validation(self):
for not_allowed_value in [1, 19]:
data = MultiDict({'person_a_has_pflegegrad': 'no',
'person_a_disability_degree': not_allowed_value,
'person_a_has_merkzeichen_g': False,
'person_a_has_merkzeichen_ag': False})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is False
def test_if_disability_degree_below_20_and_has_merkzeichen_g_then_fail_validation(self):
for not_allowed_value in [0, 1, 19]:
data = MultiDict({'person_a_has_pflegegrad': 'no',
'person_a_disability_degree': not_allowed_value,
'person_a_has_merkzeichen_g': True})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is False
def test_if_disability_degree_below_20_and_has_merkzeichen_ag_then_fail_validation(self):
for not_allowed_value in [0, 1, 19]:
data = MultiDict({'person_a_has_pflegegrad': 'no',
'person_a_disability_degree': not_allowed_value,
'person_a_has_merkzeichen_ag': True})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is False
def test_if_merkzeichen_g_and_ag_and_disability_degree_not_set_then_succ_validation(self, valid_form_data):
data = MultiDict(valid_form_data)
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is True
def test_if_merkzeichen_g_set_and_disability_degree_not_set_then_fail_validation_with_correct_message(self, valid_form_data):
data = MultiDict({**valid_form_data, **{'person_a_has_merkzeichen_g': _POSITIVE_CHECKBOX_VALUE}})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is False
assert form.errors['person_a_disability_degree'] == [_l('form.lotse.validation-disability_degree.merkzeichen_g_selected.required')]
def test_if_merkzeichen_g_set_and_disability_degree_set_then_succ_validation(self, valid_form_data):
data = MultiDict({**valid_form_data, **{'person_a_has_merkzeichen_g': _POSITIVE_CHECKBOX_VALUE, 'person_a_disability_degree': 20}})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is True
def test_if_merkzeichen_ag_set_and_disability_degree_not_set_then_fail_validation_with_correct_message(self, valid_form_data):
data = MultiDict({**valid_form_data, **{'person_a_has_merkzeichen_ag': _POSITIVE_CHECKBOX_VALUE}})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is False
assert form.errors['person_a_disability_degree'] == [_l('form.lotse.validation-disability_degree.merkzeichen_ag_selected.required')]
def test_if_merkzeichen_ag_set_and_disability_degree_set_then_succ_validation(self, valid_form_data):
data = MultiDict({**valid_form_data, **{'person_a_has_merkzeichen_ag': _POSITIVE_CHECKBOX_VALUE, 'person_a_disability_degree': 20}})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is True
def test_if_merkzeichen_g_and_ag_set_and_disability_degree_set_then_succ_validation(self, valid_form_data):
data = MultiDict({**valid_form_data, **{'person_a_has_merkzeichen_g': _POSITIVE_CHECKBOX_VALUE,
'person_a_has_merkzeichen_ag': _POSITIVE_CHECKBOX_VALUE,
'person_a_disability_degree': 20}})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is True
def test_if_merkzeichen_g_and_ag_not_set_but_disability_degree_set_then_succ_validation(self, valid_form_data):
data = MultiDict({**valid_form_data, **{'person_a_disability_degree': 20}})
form = new_merkzeichen_person_a_step(form_data=data).render_info.form
assert form.validate() is True
def test_if_merkzeichen_g_set_and_disability_degree_under_20_then_fail_validation(self, valid_form_data):
    """Merkzeichen G with a degree below 20 is rejected."""
    overrides = {
        'person_a_has_merkzeichen_g': _POSITIVE_CHECKBOX_VALUE,
        'person_a_disability_degree': 15,
    }
    step = new_merkzeichen_person_a_step(form_data=MultiDict(dict(valid_form_data, **overrides)))
    assert step.render_info.form.validate() is False
def test_if_merkzeichen_ag_set_and_disability_degree_under_20_then_fail_validation(self, valid_form_data):
    """Merkzeichen aG with a degree below 20 is rejected."""
    overrides = {
        'person_a_has_merkzeichen_ag': _POSITIVE_CHECKBOX_VALUE,
        'person_a_disability_degree': 15,
    }
    step = new_merkzeichen_person_a_step(form_data=MultiDict(dict(valid_form_data, **overrides)))
    assert step.render_info.form.validate() is False
class TestStepMerkzeichenPersonATexts:
    """Title and label of the person-A Merkzeichen step pluralise (ngettext)
    depending on whether one or two users file the return."""

    # Stored session data describing a joint (two-user) assessment.
    _MARRIED_SESSION = {
        'familienstand': 'married',
        'familienstand_date': datetime.date(2000, 1, 31),
        'familienstand_married_lived_separated': 'no',
        'familienstand_confirm_zusammenveranlagung': True,
        'person_a_has_disability': 'yes',
    }
    # Stored session data describing a single filer.
    _SINGLE_SESSION = {
        'familienstand': 'single',
        'person_a_has_disability': 'yes',
    }

    def test_if_multiple_users_then_show_multiple_title(self, new_test_request_context):
        expected = ngettext('form.lotse.merkzeichen_person_a.title',
                            'form.lotse.merkzeichen_person_a.title', num=2)
        with new_test_request_context(stored_data=dict(self._MARRIED_SESSION)):
            step = new_merkzeichen_person_a_step({})
            step._pre_handle()
            assert step.title == expected

    def test_if_multiple_users_then_show_multiple_label(self, new_test_request_context):
        expected = ngettext('form.lotse.merkzeichen_person_a.label',
                            'form.lotse.merkzeichen_person_a.label', num=2)
        with new_test_request_context(stored_data=dict(self._MARRIED_SESSION)):
            step = new_merkzeichen_person_a_step({})
            step._pre_handle()
            assert step.label == expected

    def test_if_single_user_then_show_single_title(self, new_test_request_context):
        expected = ngettext('form.lotse.merkzeichen_person_a.title',
                            'form.lotse.merkzeichen_person_a.title', num=1)
        with new_test_request_context(stored_data=dict(self._SINGLE_SESSION)):
            step = new_merkzeichen_person_a_step({})
            step._pre_handle()
            assert step.title == expected

    def test_if_single_user_then_show_single_label(self, new_test_request_context):
        expected = ngettext('form.lotse.merkzeichen_person_a.label',
                            'form.lotse.merkzeichen_person_a.label', num=1)
        with new_test_request_context(stored_data=dict(self._SINGLE_SESSION)):
            step = new_merkzeichen_person_a_step({})
            step._pre_handle()
            assert step.label == expected
def new_merkzeichen_person_b_step(form_data):
    """Instantiate StepMerkzeichenPersonB via the step chooser, POSTing *form_data*."""
    chooser = LotseStepChooser()
    return chooser.get_correct_step(StepMerkzeichenPersonB.name, True,
                                    ImmutableMultiDict(form_data))
@pytest.fixture
def test_request_context_with_person_b_disability(app):
    """POST request context whose session says person B has a disability."""
    stored = create_session_form_data({'person_b_has_disability': 'yes'})
    with app.test_request_context(method="POST") as context:
        context.session = SecureCookieSession({_LOTSE_DATA_KEY: stored})
        yield context
@pytest.mark.usefixtures('test_request_context_with_person_b_disability')
class TestStepMerkzeichenPersonBValidation:
    """Validation rules of the Merkzeichen step for person B."""

    @pytest.fixture()
    def valid_form_data(self):
        # Minimal payload satisfying the one unconditionally required field.
        return {'person_b_has_pflegegrad': 'no'}

    @staticmethod
    def _form_for(payload):
        # Shorthand: build the step from a plain dict and return its form.
        return new_merkzeichen_person_b_step(form_data=MultiDict(payload)).render_info.form

    def test_if_has_pflegegrad_not_given_then_fail_validation(self):
        assert self._form_for({}).validate() is False

    def test_if_has_pflegegrad_given_then_succ_validation(self):
        assert self._form_for({'person_b_has_pflegegrad': 'no'}).validate() is True

    def test_if_disability_degree_has_allowed_value_then_succ_validation(self):
        # Multiples of five from 20 through 100 are the allowed degrees.
        for degree in range(20, 101, 5):
            payload = {'person_b_has_pflegegrad': 'no', 'person_b_disability_degree': degree}
            assert self._form_for(payload).validate() is True

    def test_if_disability_degree_has_not_allowed_value_then_fail_validation(self):
        for degree in (21, 105):
            payload = {'person_b_has_pflegegrad': 'no', 'person_b_disability_degree': degree}
            assert self._form_for(payload).validate() is False

    def test_if_disability_degree_zero_and_has_no_merkzeichen_g_or_ag_then_succ_validation(self):
        payload = {'person_b_has_pflegegrad': 'no',
                   'person_b_disability_degree': 0,
                   'person_b_has_merkzeichen_g': False,
                   'person_b_has_merkzeichen_ag': False}
        assert self._form_for(payload).validate() is True

    def test_if_disability_degree_below_20_and_not_zero_and_has_no_merkzeichen_g_or_ag_then_fail_validation(self):
        for degree in (1, 19):
            payload = {'person_b_has_pflegegrad': 'no',
                       'person_b_disability_degree': degree,
                       'person_b_has_merkzeichen_g': False,
                       'person_b_has_merkzeichen_ag': False}
            assert self._form_for(payload).validate() is False

    def test_if_disability_degree_below_20_and_has_merkzeichen_g_then_fail_validation(self):
        for degree in (0, 1, 19):
            payload = {'person_b_has_pflegegrad': 'no',
                       'person_b_disability_degree': degree,
                       'person_b_has_merkzeichen_g': True}
            assert self._form_for(payload).validate() is False

    def test_if_disability_degree_below_20_and_has_merkzeichen_ag_then_fail_validation(self):
        for degree in (0, 1, 19):
            payload = {'person_b_has_pflegegrad': 'no',
                       'person_b_disability_degree': degree,
                       'person_b_has_merkzeichen_ag': True}
            assert self._form_for(payload).validate() is False

    def test_if_merkzeichen_g_and_ag_and_disability_degree_not_set_then_succ_validation(self, valid_form_data):
        assert self._form_for(valid_form_data).validate() is True

    def test_if_merkzeichen_g_set_and_disability_degree_not_set_then_fail_validation_with_correct_message(self, valid_form_data):
        form = self._form_for(dict(valid_form_data, person_b_has_merkzeichen_g=_POSITIVE_CHECKBOX_VALUE))
        assert form.validate() is False
        assert form.errors['person_b_disability_degree'] == [
            _l('form.lotse.validation-disability_degree.merkzeichen_g_selected.required')]

    def test_if_merkzeichen_g_set_and_disability_degree_set_then_succ_validation(self, valid_form_data):
        payload = dict(valid_form_data,
                       person_b_has_merkzeichen_g=_POSITIVE_CHECKBOX_VALUE,
                       person_b_disability_degree=20)
        assert self._form_for(payload).validate() is True

    def test_if_merkzeichen_ag_set_and_disability_degree_not_set_then_fail_validation_with_correct_message(self, valid_form_data):
        form = self._form_for(dict(valid_form_data, person_b_has_merkzeichen_ag=_POSITIVE_CHECKBOX_VALUE))
        assert form.validate() is False
        assert form.errors['person_b_disability_degree'] == [
            _l('form.lotse.validation-disability_degree.merkzeichen_ag_selected.required')]

    def test_if_merkzeichen_ag_set_and_disability_degree_set_then_succ_validation(self, valid_form_data):
        payload = dict(valid_form_data,
                       person_b_has_merkzeichen_ag=_POSITIVE_CHECKBOX_VALUE,
                       person_b_disability_degree=20)
        assert self._form_for(payload).validate() is True

    def test_if_merkzeichen_g_and_ag_set_and_disability_degree_set_then_succ_validation(self, valid_form_data):
        payload = dict(valid_form_data,
                       person_b_has_merkzeichen_g=_POSITIVE_CHECKBOX_VALUE,
                       person_b_has_merkzeichen_ag=_POSITIVE_CHECKBOX_VALUE,
                       person_b_disability_degree=20)
        assert self._form_for(payload).validate() is True

    def test_if_merkzeichen_g_and_ag_not_set_but_disability_degree_set_then_succ_validation(self, valid_form_data):
        assert self._form_for(dict(valid_form_data, person_b_disability_degree=20)).validate() is True

    def test_if_merkzeichen_g_set_and_disability_degree_under_20_then_fail_validation(self, valid_form_data):
        payload = dict(valid_form_data,
                       person_b_has_merkzeichen_g=_POSITIVE_CHECKBOX_VALUE,
                       person_b_disability_degree=15)
        assert self._form_for(payload).validate() is False

    def test_if_merkzeichen_ag_set_and_disability_degree_under_20_then_fail_validation(self, valid_form_data):
        payload = dict(valid_form_data,
                       person_b_has_merkzeichen_ag=_POSITIVE_CHECKBOX_VALUE,
                       person_b_disability_degree=15)
        assert self._form_for(payload).validate() is False
class TestHasMerkzeichenPersonAPrecondition:
    """parse_obj must reject data with no Merkzeichen-related field and accept
    data with any single one of them.

    pytest already marks a test as failed when an unexpected exception escapes,
    so the success cases call ``parse_obj`` directly — the original
    try/except ValidationError + pytest.fail boilerplate added nothing.
    """

    def _assert_parses(self, **fields):
        # Helper for the success cases: any ValidationError fails the test.
        HasMerkzeichenPersonAPrecondition.parse_obj(
            {'person_a_has_disability': 'yes', **fields})

    def test_if_person_a_has_no_merkzeichen_set_then_raise_validation_error(self):
        with pytest.raises(ValidationError):
            HasMerkzeichenPersonAPrecondition.parse_obj({'person_a_has_disability': 'yes'})

    def test_if_person_a_has_pflegegrad_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_a_has_pflegegrad='yes')

    def test_if_person_a_disability_degree_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_a_disability_degree=20)

    def test_if_person_a_has_merkzeichen_g_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_a_has_merkzeichen_g=True)

    def test_if_person_a_has_merkzeichen_ag_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_a_has_merkzeichen_ag=True)

    def test_if_person_a_has_merkzeichen_bl_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_a_has_merkzeichen_bl=True)

    def test_if_person_a_has_merkzeichen_tbl_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_a_has_merkzeichen_tbl=True)

    def test_if_person_a_has_merkzeichen_h_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_a_has_merkzeichen_h=True)
class TestHasMerkzeichenPersonBPrecondition:
    """parse_obj must reject data with no Merkzeichen-related field and accept
    data with any single one of them.

    pytest already marks a test as failed when an unexpected exception escapes,
    so the success cases call ``parse_obj`` directly — the original
    try/except ValidationError + pytest.fail boilerplate added nothing.
    """

    def _assert_parses(self, **fields):
        # Helper for the success cases: any ValidationError fails the test.
        HasMerkzeichenPersonBPrecondition.parse_obj(
            {'person_b_has_disability': 'yes', **fields})

    def test_if_person_b_has_no_merkzeichen_set_then_raise_validation_error(self):
        with pytest.raises(ValidationError):
            HasMerkzeichenPersonBPrecondition.parse_obj({'person_b_has_disability': 'yes'})

    def test_if_person_b_has_pflegegrad_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_b_has_pflegegrad='yes')

    def test_if_person_b_disability_degree_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_b_disability_degree=20)

    def test_if_person_b_has_merkzeichen_g_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_b_has_merkzeichen_g=True)

    def test_if_person_b_has_merkzeichen_ag_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_b_has_merkzeichen_ag=True)

    def test_if_person_b_has_merkzeichen_bl_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_b_has_merkzeichen_bl=True)

    def test_if_person_b_has_merkzeichen_tbl_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_b_has_merkzeichen_tbl=True)

    def test_if_person_b_has_merkzeichen_h_set_then_do_not_raise_validation_error(self):
        self._assert_parses(person_b_has_merkzeichen_h=True)
| 47.790927
| 140
| 0.703219
| 3,019
| 24,230
| 5.123551
| 0.055316
| 0.043445
| 0.03142
| 0.052754
| 0.93729
| 0.933799
| 0.92798
| 0.912658
| 0.898371
| 0.892552
| 0
| 0.008981
| 0.218778
| 24,230
| 507
| 141
| 47.790927
| 0.808178
| 0.001692
| 0
| 0.70398
| 0
| 0
| 0.175955
| 0.146271
| 0
| 0
| 0
| 0
| 0.104478
| 1
| 0.149254
| false
| 0
| 0.022388
| 0.00995
| 0.19403
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7aa08fc66955a759d90000d4aa623d83cb6fe2c6
| 2,108
|
py
|
Python
|
tests/test_Downloader.py
|
korn-alex/toolbox
|
e0d8038d9579d59340020cc29de8cfc1f88425a1
|
[
"MIT"
] | null | null | null |
tests/test_Downloader.py
|
korn-alex/toolbox
|
e0d8038d9579d59340020cc29de8cfc1f88425a1
|
[
"MIT"
] | null | null | null |
tests/test_Downloader.py
|
korn-alex/toolbox
|
e0d8038d9579d59340020cc29de8cfc1f88425a1
|
[
"MIT"
] | null | null | null |
import unittest
from pathlib import Path
# print(Path.cwd())
from toolbox.web import Downloader
class TestDownloader(unittest.TestCase):
    """Exercises Downloader._make_name, which sanitises a proposed file name
    and falls back to the URL's own stem/extension when needed."""

    @classmethod
    def setUpClass(cls):
        print('setUpClass')

    def setUp(self):
        self.d = Downloader()
        self.url = Path('https://images.pexels.com/photos/459793/pexels-photo-459793.jpeg?auto=compress&cs=tinysrgb&dpr=2&h=750&w=1260')

    def test_make_name(self):
        book = 'Полное собрание сочинений. Братья Карамазовы. Части II-III, Федор Достоевский.djvu'
        # (proposed name, expected sanitised result) — same pairs as before,
        # driven through a single table instead of repeated assertEqual lines.
        cases = [
            (book, book),
            ('_3_', '_3_.jpeg'),
            (None, 'pexels-photo-459793.jpeg'),
            ('', 'pexels-photo-459793.jpeg'),
            ('?', 'pexels-photo-459793.jpeg'),
            ('?..?..?', 'pexels-photo-459793.jpeg'),
            ('?..?..?**\\//- -', 'pexels-photo-459793.jpeg'),
            ('___', '___.jpeg'),
            ('_3.png_', '_3.png_'),
            ('_3.png*', '_3.jpeg'),
            ('ok', 'ok.jpeg'),
            ('ok.jpeg', 'ok.jpeg'),
            ('ok.png', 'ok.png'),
            ('ok.png.jpeg', 'ok.png.jpeg'),
            (123, '123.jpeg'),
            (123.44, '123.44'),
            ('123', '123.jpeg'),
            ('123.jpeg', '123.jpeg'),
            ('123.png', '123.png'),
        ]
        for proposed, expected in cases:
            self.assertEqual(self.d._make_name(self.url, proposed), expected)
# Allow running this module directly: ``python test_Downloader.py``.
if __name__ == '__main__':
    unittest.main()
| 52.7
| 223
| 0.677419
| 305
| 2,108
| 4.472131
| 0.203279
| 0.073314
| 0.175953
| 0.278592
| 0.73607
| 0.73607
| 0.73607
| 0.73607
| 0.710411
| 0.676686
| 0
| 0.049342
| 0.134725
| 2,108
| 40
| 224
| 52.7
| 0.698465
| 0.008065
| 0
| 0
| 0
| 0.03125
| 0.285646
| 0.057416
| 0
| 0
| 0
| 0
| 0.59375
| 1
| 0.09375
| false
| 0
| 0.09375
| 0
| 0.21875
| 0.03125
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ab0387933acfa254802144573d963906129cbe8
| 114
|
py
|
Python
|
research/object_detection/python-tkinter-gui-master/GUITools/WindowTools/__init__.py
|
r08in279/Traffic-Management-Using-Drones
|
15fdba219cef04b3cb59a68901a8064c3795d8e3
|
[
"Apache-2.0"
] | null | null | null |
research/object_detection/python-tkinter-gui-master/GUITools/WindowTools/__init__.py
|
r08in279/Traffic-Management-Using-Drones
|
15fdba219cef04b3cb59a68901a8064c3795d8e3
|
[
"Apache-2.0"
] | null | null | null |
research/object_detection/python-tkinter-gui-master/GUITools/WindowTools/__init__.py
|
r08in279/Traffic-Management-Using-Drones
|
15fdba219cef04b3cb59a68901a8064c3795d8e3
|
[
"Apache-2.0"
] | null | null | null |
# WindowTools packaging file.
# Re-exports the button and window helpers so callers can simply write
# ``from GUITools.WindowTools import <name>``.
from GUITools.WindowTools.Buttons import *
from GUITools.WindowTools.Windows import *
| 28.5
| 42
| 0.842105
| 13
| 114
| 7.384615
| 0.615385
| 0.25
| 0.479167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096491
| 114
| 3
| 43
| 38
| 0.932039
| 0.22807
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8f9c9a5c23ed8979faaa144a4b68d7b11375fc98
| 3,277
|
py
|
Python
|
result/migrations/nn/0015_auto_20190627_0733.py
|
0Jihad/uqhs
|
16e16742022142d47d0a423aa27ca50fe706a06b
|
[
"MIT"
] | null | null | null |
result/migrations/nn/0015_auto_20190627_0733.py
|
0Jihad/uqhs
|
16e16742022142d47d0a423aa27ca50fe706a06b
|
[
"MIT"
] | 11
|
2019-10-13T11:05:26.000Z
|
2022-03-11T23:48:57.000Z
|
result/migrations/nn/0015_auto_20190627_0733.py
|
0Jihad/uqhs
|
16e16742022142d47d0a423aa27ca50fe706a06b
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.3 on 2019-06-26 18:33
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: relaxes every per-subject FK on
    ``overall_annual`` to allow blank/null and to SET_NULL on delete.

    Kept verbatim — editing the body of an applied migration desynchronises
    it from the recorded migration state.
    """

    dependencies = [
        ('result', '0014_auto_20190627_0718'),
    ]

    operations = [
        migrations.AlterField(
            model_name='overall_annual',
            name='acc',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='acc', to='result.TERM'),
        ),
        migrations.AlterField(
            model_name='overall_annual',
            name='agr',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='agr', to='result.TERM'),
        ),
        migrations.AlterField(
            model_name='overall_annual',
            name='bst',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='bst', to='result.TERM'),
        ),
        migrations.AlterField(
            model_name='overall_annual',
            name='bus',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='bus', to='result.TERM'),
        ),
        migrations.AlterField(
            model_name='overall_annual',
            name='eng',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='eng', to='result.TERM'),
        ),
        migrations.AlterField(
            model_name='overall_annual',
            name='his',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='his', to='result.TERM'),
        ),
        # NOTE(review): every other field uses related_name == field name, but
        # 'ict' points at related_name='arb'. This mirrors the model state at
        # generation time — confirm against the current models before "fixing".
        migrations.AlterField(
            model_name='overall_annual',
            name='ict',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='arb', to='result.TERM'),
        ),
        migrations.AlterField(
            model_name='overall_annual',
            name='irs',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='irs', to='result.TERM'),
        ),
        migrations.AlterField(
            model_name='overall_annual',
            name='mat',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='mat', to='result.TERM'),
        ),
        migrations.AlterField(
            model_name='overall_annual',
            name='nva',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='nva', to='result.TERM'),
        ),
        migrations.AlterField(
            model_name='overall_annual',
            name='prv',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='prv', to='result.TERM'),
        ),
        migrations.AlterField(
            model_name='overall_annual',
            name='yor',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='yor', to='result.TERM'),
        ),
    ]
| 43.693333
| 143
| 0.622521
| 382
| 3,277
| 5.175393
| 0.143979
| 0.056651
| 0.092059
| 0.144664
| 0.867982
| 0.867982
| 0.867982
| 0.844714
| 0.844714
| 0.844714
| 0
| 0.012455
| 0.240464
| 3,277
| 74
| 144
| 44.283784
| 0.78184
| 0.013732
| 0
| 0.529412
| 1
| 0
| 0.124149
| 0.007121
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029412
| 0
| 0.073529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8fe44b20abe3244f8b6eb1119635f14754a50213
| 11,505
|
py
|
Python
|
game2048/my_agents.py
|
snuffle-PX/2048-api
|
a43c74c3cfcf47e3f79ab631705b46ddbe3add1e
|
[
"Apache-2.0"
] | null | null | null |
game2048/my_agents.py
|
snuffle-PX/2048-api
|
a43c74c3cfcf47e3f79ab631705b46ddbe3add1e
|
[
"Apache-2.0"
] | null | null | null |
game2048/my_agents.py
|
snuffle-PX/2048-api
|
a43c74c3cfcf47e3f79ab631705b46ddbe3add1e
|
[
"Apache-2.0"
] | null | null | null |
"""
author: 赵阳桁
TrainAgent use buffer to train
TrainAgent2 train without buffer
TestAgent is a simple agent for webapp, evaluate and figerprint.
"""
from .agents import Agent
from utils import try_to_move, get_train_data, conv_to_onehot, ReplayMemory, Transition, conv_to_onehot_12, get_train_data_12
import numpy as np
import torch
import torch.nn
import torch.nn.functional as F
import torch.optim
from model import nn2048, nn2048_2, nn2048_3, nn2048_4
from .expectimax import board_to_move
import time
# Mini-batch size for supervised updates drawn from the replay buffer.
BATCH_SIZE = 64
# Adam learning rate for all training agents.
learning_rate = 1e-4
# Probability threshold used in TrainAgent.step to mix network moves with
# teacher (expectimax) moves during training.
THRESHOLD = 0.5
# Default file for (de)serialising a network state_dict.
DEFAULT_PATH = 'model_dict.pkl'
class TrainAgent(Agent):
    """2048 agent trained by imitation of the expectimax teacher, with a replay buffer.

    In training mode each step trains the net on the teacher's move, then plays
    either the net's move or the teacher's move depending on THRESHOLD and the
    current score. In eval mode it plays the net's move and tracks disagreement
    with the teacher.
    """

    def __init__(self, game, display=None, train=True, load_data=False, path=None):
        """
        :param game: game instance owned by the base Agent
        :param display: optional display passed through to Agent
        :param train: True -> build teacher/optimizer/buffer; False -> eval only
        :param load_data: in training mode, load an existing state_dict from path
        :param path: state_dict file; falls back to DEFAULT_PATH when None
        """
        super().__init__(game, display)
        self.train = train
        # Histogram of final scores keyed by tile value (2..32768), filled by play().
        self.statistics = {2 ** i: 0 for i in range(1, 16)}
        self.threshold = THRESHOLD
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.step_counter = 0    # total steps taken
        self.error_counter = 0   # net move != teacher move (train) / invalid move (eval)
        self.diff_counter = 0    # eval only: invalid net moves that also differ from teacher
        # NOTE(review): self.t accumulates ``start - end`` below, i.e. the
        # NEGATIVE of elapsed seconds — looks inverted; verify consumers.
        self.t = 0
        if self.train:
            # Teacher policy: expectimax search over the raw board.
            self.teacher = board_to_move
            if load_data:
                if path is None:
                    path = DEFAULT_PATH
                else:
                    pass  # explicit no-op: a caller-supplied path is used as-is
                try:
                    self.net = nn2048_3().to(self.device)
                    self.net.load_state_dict(torch.load(path, map_location=self.device))
                except FileNotFoundError:
                    # Missing checkpoint: start from a freshly initialised net.
                    print('No model loaded! Create new model')
                    self.net = nn2048_3().to(self.device)
            else:
                self.net = nn2048_3().to(self.device)
            self.criterion = torch.nn.CrossEntropyLoss()
            self.optimizer = torch.optim.Adam(self.net.parameters(), lr=learning_rate)  # configure.learning_rate)
            # Replay buffer holding the most recent 5 batches worth of samples.
            self.buffer = ReplayMemory(5 * BATCH_SIZE)
        else:
            # Eval mode: load weights and freeze into eval() if available.
            try:
                if path is None:
                    path = DEFAULT_PATH
                self.net = nn2048_3().to(self.device)
                self.net.load_state_dict(torch.load(path, map_location=self.device))
                self.net.eval()
            except FileNotFoundError:
                print('No model loaded!')
                self.net = nn2048_3().to(self.device)

    def train_net(self, board, target_direction):
        """Push the (augmented) samples for this board into the buffer and run
        one supervised step on a batch sampled from it."""
        # target_direction = self.teacher(board)
        train_data, train_targets = get_train_data(board, target_direction)
        # push6: presumably stores the 6 augmented variants produced by
        # get_train_data — confirm against ReplayMemory.
        self.buffer.push6(train_data, train_targets)
        transitions = self.buffer.sample(BATCH_SIZE)
        batch = Transition(*zip(*transitions))
        train_data = torch.Tensor(batch.board).to(self.device).float()
        train_targets = torch.Tensor(batch.direction).to(self.device).long().squeeze(1)
        # train_data = torch.Tensor(train_data).to(self.device).float()
        # train_targets = torch.Tensor(train_targets).to(self.device).long().squeeze(1) # squeeze() delete the dimention of 1
        y = self.net.forward(train_data)
        loss = self.criterion(y, train_targets)
        self.optimizer.zero_grad()
        loss.backward()
        self.optimizer.step()

    def step(self):
        """Choose the next move (0-3) for the current board; trains first when
        in training mode."""
        start = time.time()
        board = self.game.board
        oh_board = conv_to_onehot(board)
        self.step_counter += 1
        if self.train:
            target_direction = self.teacher(board)
            self.train_net(board, target_direction)
            # Exploit the net with probability 1-threshold, or always while the
            # score is still below 512; otherwise defer to the teacher.
            if np.random.rand() > self.threshold or self.game.score < 512 :
                direction = self.net.predict(torch.Tensor(oh_board.reshape(1, *oh_board.shape)).to(self.device).float())
                if direction != target_direction:
                    self.error_counter += 1
                # _, score = try_to_move(board, direction)
                # if score == -1: # cannot move to the selected direction
                #     # direction = target_direction
                #     #print("score -1")
                #     self.error_counter += 1
            else:
                direction = target_direction
        else:
            """
            Only test without train
            """
            direction = self.net.predict(torch.Tensor(oh_board.reshape(1, *oh_board.shape)).to(self.device).float())
            _, score = try_to_move(board, direction)
            if score == -1:  # cannot move to the selected direction
                self.error_counter += 1
                if direction != board_to_move(board):
                    self.diff_counter += 1
                # direction = board_to_move(board)
                # print("score -1")
        end = time.time()
        self.t += start - end
        return direction

    def play(self, max_iter=np.inf, verbose=False):
        """Run a full game, then record its final score in ``statistics``."""
        super(TrainAgent, self).play(max_iter=max_iter, verbose=verbose)
        self.statistics[self.game.score] += 1

    def new_game(self, game):
        """Attach a fresh game instance, keeping net/counters/statistics."""
        self.game = game
class TrainAgent2(TrainAgent):
    """TrainAgent variant that trains on each board directly, skipping the replay buffer."""

    def train_net(self, board, target_direction):
        """Run one supervised step on the samples produced for this single board."""
        samples, labels = get_train_data(board, target_direction)
        samples = torch.Tensor(samples).to(self.device).float()
        # squeeze(1) drops the singleton label dimension expected by CrossEntropyLoss.
        labels = torch.Tensor(labels).to(self.device).long().squeeze(1)
        predictions = self.net.forward(samples)
        loss = self.criterion(predictions, labels)
        self.optimizer.zero_grad()
        loss.backward()
        self.optimizer.step()
class RLAgent(Agent):
    """Value-based RL agent: scores each legal move with the network plus the
    immediate merge score, plays the best one, and trains the net toward the
    chosen value on the previous board."""

    def __init__(self, game, display=None, train=True, load_data=False, path=None):
        super().__init__(game, display)
        self.train = train
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.net = nn2048()
        # BUG FIX: the optimizer must receive the network's parameters; the
        # original called torch.optim.Adam(betas=...) with no params, which
        # raises TypeError on construction.
        self.optimizer = torch.optim.Adam(self.net.parameters(), betas=(0.5, 0.999))
        self.criterion = torch.nn.MSELoss(size_average=True)
        self.last_board = None  # board seen on the previous step (None before first step)

    def step(self):
        """Return the chosen direction (0-3) for the current board.

        Returns 0 as a harmless default when no move is legal (game over).
        """
        board = self.game.board
        oh_board = conv_to_onehot(board)
        # Collect (direction, immediate score) for every legal move.
        legal_moves = []
        for d in range(4):
            _, score = try_to_move(board, d)
            if score >= 0:
                legal_moves.append((d, score))
        direction = 0  # BUG FIX: original left `direction` unbound when no move was legal
        if legal_moves:
            values = [self.net.predict(torch.Tensor(oh_board.reshape(1, *oh_board.shape)).to(self.device).float()) + score * 2
                      for d, score in legal_moves]
            best = np.argmax(values)
            value = np.max(values)
            # BUG FIX: entries are (direction, score); the original returned
            # legal_moves[best][1] — the SCORE — as the direction.
            direction = legal_moves[best][0]
            # BUG FIX: `any(self.last_board)` raised before the first step
            # (last_board is None) and is ambiguous on a 2-D ndarray; use
            # an explicit None check plus ndarray.any().
            if self.train and self.last_board is not None and self.last_board.any():
                self.train_net(self.last_board, value)
        self.last_board = board
        return direction

    def train_net(self, last_board, value):
        """One MSE regression step pushing the net's value of *last_board* toward *value*."""
        train_data, _ = get_train_data(last_board, 0)
        # Same target value for each of the 6 augmented samples get_train_data emits.
        train_targets = np.array([value] * 6).reshape(-1, 1)
        train_data = torch.Tensor(train_data).to(self.device).float()
        train_targets = torch.Tensor(train_targets).to(self.device).long().squeeze(1)
        y = self.net.forward(train_data)
        loss = self.criterion(y, train_targets)
        self.optimizer.zero_grad()
        loss.backward()
        self.optimizer.step()
class TrainAgent_12(Agent):
    """Imitation-learning agent like TrainAgent, but using the 12-channel
    one-hot encoding (conv_to_onehot_12 / get_train_data_12) and the nn2048_4
    network, and training without a replay buffer."""

    def __init__(self, game, display=None, train=True, load_data=False, path=None):
        """
        :param game: game instance owned by the base Agent
        :param display: optional display passed through to Agent
        :param train: True -> build teacher/optimizer; False -> eval only
        :param load_data: in training mode, load an existing state_dict from path
        :param path: state_dict file; falls back to DEFAULT_PATH when None
        """
        super().__init__(game, display)
        self.train = train
        # Histogram of final scores keyed by tile value, filled by play().
        self.statistics = {2 ** i: 0 for i in range(1, 16)}
        self.threshold = THRESHOLD
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.step_counter = 0    # total steps taken
        self.error_counter = 0   # net/teacher disagreement (train) / invalid move (eval)
        self.diff_counter = 0    # eval only: invalid net moves that also differ from teacher
        # NOTE(review): accumulates ``start - end`` (negative elapsed time) — see step().
        self.t = 0
        if self.train:
            self.teacher = board_to_move
            if load_data:
                if path is None:
                    path = DEFAULT_PATH
                else:
                    pass  # explicit no-op: a caller-supplied path is used as-is
                try:
                    self.net = nn2048_4().to(self.device)
                    self.net.load_state_dict(torch.load(path, map_location=self.device))
                except FileNotFoundError:
                    print('No model loaded! Create new model')
                    self.net = nn2048_4().to(self.device)
            else:
                self.net = nn2048_4().to(self.device)
            self.criterion = torch.nn.CrossEntropyLoss()
            self.optimizer = torch.optim.Adam(self.net.parameters(), lr=learning_rate)  # configure.learning_rate)
            # self.buffer = ReplayMemory(5 * BATCH_SIZE)
        else:
            try:
                if path is None:
                    path = DEFAULT_PATH
                self.net = nn2048_4().to(self.device)
                self.net.load_state_dict(torch.load(path, map_location=self.device))
                self.net.eval()
            except FileNotFoundError:
                print('No model loaded!')
                self.net = nn2048_4().to(self.device)

    def train_net(self, board, target_direction):
        """One supervised step directly on this board's 12-channel samples (no buffer)."""
        train_data, train_targets = get_train_data_12(board, target_direction)
        train_data = torch.Tensor(train_data).to(self.device).float()
        # squeeze(1) drops the singleton label dimension for CrossEntropyLoss.
        train_targets = torch.Tensor(train_targets).to(self.device).long().squeeze(1)
        y = self.net.forward(train_data)
        loss = self.criterion(y, train_targets)
        self.optimizer.zero_grad()
        loss.backward()
        self.optimizer.step()

    def step(self):
        """Choose the next move (0-3); trains on the teacher's move first when training."""
        start = time.time()
        board = self.game.board
        oh_board = conv_to_onehot_12(board)
        self.step_counter += 1
        if self.train:
            target_direction = self.teacher(board)
            self.train_net(board, target_direction)
            # Exploit the net with probability 1-threshold, or always below score 512.
            if np.random.rand() > self.threshold or self.game.score < 512:
                direction = self.net.predict(torch.Tensor(oh_board.reshape(1, *oh_board.shape)).to(self.device).float())
                if direction != target_direction:
                    self.error_counter += 1
            else:
                direction = target_direction
        else:
            """
            Only test without train
            """
            direction = self.net.predict(torch.Tensor(oh_board.reshape(1, *oh_board.shape)).to(self.device).float())
            _, score = try_to_move(board, direction)
            if score == -1:  # cannot move to the selected direction
                self.error_counter += 1
                if direction != board_to_move(board):
                    self.diff_counter += 1
                # direction = board_to_move(board)
                # print("score -1")
        end = time.time()
        self.t += start - end
        return direction

    def play(self, max_iter=np.inf, verbose=False):
        """Run a full game, then record its final score in ``statistics``."""
        super(TrainAgent_12, self).play(max_iter=max_iter, verbose=verbose)
        self.statistics[self.game.score] += 1

    def new_game(self, game):
        """Attach a fresh game instance, keeping net/counters/statistics."""
        self.game = game
# Checkpoint used by TestAgent (webapp / evaluate / fingerprint runs).
DEFAULT_TEST_PATH = 'model3_dict_01_11.pkl'
class TestAgent(Agent):
    """Inference-only agent: loads a fixed nn2048_3 checkpoint and plays the
    network's predicted move every step."""

    def __init__(self, game, display=None):
        super().__init__(game, display)
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.net = nn2048_3().to(self.device)
        # No FileNotFoundError fallback here: a missing checkpoint should fail loudly.
        self.net.load_state_dict(torch.load(DEFAULT_TEST_PATH, map_location=self.device))
        self.net.eval()

    def step(self):
        """Return the network's move (int 0-3) for the current board."""
        board = self.game.board
        oh_board = conv_to_onehot(board)
        direction = self.net.predict(torch.Tensor(oh_board.reshape(1, *oh_board.shape)).to(self.device).float())
        # NOTE(review): .data.numpy() assumes the prediction lives on CPU —
        # would fail on a CUDA device; confirm predict()'s device handling.
        direction = int(direction.data.numpy())
        return direction
| 35.291411
| 126
| 0.588701
| 1,421
| 11,505
| 4.579873
| 0.121042
| 0.055317
| 0.049785
| 0.028734
| 0.818531
| 0.807314
| 0.792717
| 0.782422
| 0.766441
| 0.766441
| 0
| 0.021544
| 0.302043
| 11,505
| 325
| 127
| 35.4
| 0.788917
| 0.070404
| 0
| 0.766234
| 0
| 0
| 0.01529
| 0.001994
| 0
| 0
| 0
| 0
| 0
| 1
| 0.069264
| false
| 0.008658
| 0.04329
| 0
| 0.151515
| 0.017316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8908af31afe8f43f94988a790c4fd5eb4b22fa41
| 138
|
py
|
Python
|
python/Lesson1-Secrets/s3_secrets_usingconst.py
|
franTarkenton/IntroToJenkins
|
cbdd5933b8e0315fe747523f9b94b3728db9a585
|
[
"MIT"
] | null | null | null |
python/Lesson1-Secrets/s3_secrets_usingconst.py
|
franTarkenton/IntroToJenkins
|
cbdd5933b8e0315fe747523f9b94b3728db9a585
|
[
"MIT"
] | null | null | null |
python/Lesson1-Secrets/s3_secrets_usingconst.py
|
franTarkenton/IntroToJenkins
|
cbdd5933b8e0315fe747523f9b94b3728db9a585
|
[
"MIT"
] | null | null | null |
# Demo: keep secrets in a separate (git-ignored) constants module so the
# values never appear in this script.
import constants
# you can now access any of your constants from constants module
# (any name defined in constants.py is reachable as an attribute here)
print(f"bills password is {constants.BILLS_PASSWORD}")
| 27.6
| 64
| 0.804348
| 21
| 138
| 5.238095
| 0.761905
| 0.236364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137681
| 138
| 4
| 65
| 34.5
| 0.92437
| 0.449275
| 0
| 0
| 0
| 0
| 0.594595
| 0.351351
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
|
0
| 7
|
8918d9792bc9bf0be6b01759909e9c4eedf32494
| 2,158
|
py
|
Python
|
code/text_data_utils.py
|
ZengHaihong/Smart-network
|
f4143e2018641adc1dbe1bf51a0d76258ea449ac
|
[
"MIT"
] | 7
|
2019-10-11T08:02:49.000Z
|
2022-02-27T12:28:09.000Z
|
code/text_data_utils.py
|
qmylzx/Smart-network
|
f4143e2018641adc1dbe1bf51a0d76258ea449ac
|
[
"MIT"
] | null | null | null |
code/text_data_utils.py
|
qmylzx/Smart-network
|
f4143e2018641adc1dbe1bf51a0d76258ea449ac
|
[
"MIT"
] | 7
|
2019-10-11T08:02:42.000Z
|
2021-07-20T07:19:37.000Z
|
def generate_data_replace_string():
    """Build a replacement map that normalises day-of-month fragments.

    Converts date fragments like '/2' of '/2/5' into zero-padded '02', so a
    string such as '/2/5' can become '0205'. Single-digit days '/1'..'/9'
    map to '01'..'09'; two-digit days '/10'..'/31' map to themselves without
    the slash.

    Returns:
        dict[str, str]: original fragment -> replacement fragment, with the
        single-digit keys first (same insertion order as the original code).
    """
    # BUG FIX: the original used range(1, 9) and silently skipped day 9,
    # leaving '/9' unmapped.
    date_replace_str_map = {'/' + str(day): '0' + str(day) for day in range(1, 10)}
    # Two-digit days need no padding, just the slash dropped.
    date_replace_str_map.update({'/' + str(day): str(day) for day in range(10, 32)})
    # NOTE(review): keys like '/1' are prefixes of '/10'..'/19'; callers doing
    # sequential string replacement must handle longer keys first -- confirm.
    return date_replace_str_map
def generate_time_replace_string():
    """Build a replacement map that normalises hour-of-day fragments.

    Converts time fragments like ' 0:' of ' 0:05' into zero-padded '00', so a
    string such as ' 0:05' can become '0005'. Single-digit hours ' 0:'..' 9:'
    map to '00'..'09'; two-digit hours ' 10:'..' 23:' map to themselves
    without the space and colon.

    Returns:
        dict[str, str]: original fragment -> replacement fragment, with the
        single-digit keys first (same insertion order as the original code).
    """
    # BUG FIX: the original used range(9) and silently skipped hour 9,
    # leaving ' 9:' unmapped.
    time_replace_str_map = {' ' + str(hour) + ':': '0' + str(hour) for hour in range(10)}
    # Two-digit hours need no padding, just the delimiters dropped.
    time_replace_str_map.update({' ' + str(hour) + ':': str(hour) for hour in range(10, 24)})
    return time_replace_str_map
| 31.275362
| 91
| 0.652456
| 358
| 2,158
| 3.541899
| 0.122905
| 0.113565
| 0.164038
| 0.094637
| 0.865931
| 0.823344
| 0.823344
| 0.823344
| 0.823344
| 0.823344
| 0
| 0.021505
| 0.224282
| 2,158
| 69
| 92
| 31.275362
| 0.735962
| 0.102873
| 0
| 0.727273
| 1
| 0
| 0.004253
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
64f825db678a5077d947194db0105d6708941ce0
| 6,120
|
py
|
Python
|
JigsawInferenceGizmo.py
|
shrutiichandra/JigsawInferenceGizmo
|
da9d60afa6b0a8d219fbab55bff2ce37f2013f73
|
[
"MIT"
] | 25
|
2022-03-03T19:47:05.000Z
|
2022-03-25T16:22:51.000Z
|
JigsawInferenceGizmo.py
|
shrutiichandra/JigsawInferenceGizmo
|
da9d60afa6b0a8d219fbab55bff2ce37f2013f73
|
[
"MIT"
] | 2
|
2022-03-04T08:10:21.000Z
|
2022-03-06T03:36:37.000Z
|
JigsawInferenceGizmo.py
|
shrutiichandra/JigsawInferenceGizmo
|
da9d60afa6b0a8d219fbab55bff2ce37f2013f73
|
[
"MIT"
] | 11
|
2022-03-03T22:49:46.000Z
|
2022-03-18T10:00:11.000Z
|
# JIG code from Stand-up Maths video "Why don't Jigsaw Puzzles have the correct number of pieces?"
def low_factors(n):
    """Return every factor of n not exceeding sqrt(n), in increasing order.

    These are the lower halves of n's factor pairs; the matching upper half
    of each pair is n // factor.
    """
    limit = int(n ** 0.5) + 1
    return [d for d in range(1, limit) if n % d == 0]
def jig(w,h,n,b=0):
    """Suggest a jigsaw grid for a w-by-h picture with a target of n pieces.

    Scans piece counts within +/-10% of n, picks for each count the factor
    pair whose grid ratio best matches the picture's aspect ratio, scores
    candidates with a small per-piece penalty for straying from n, prints the
    running candidates and a final recommendation, and returns 'DONE'.

    Args:
        w, h: picture width and height (any units; only their ratio matters).
        n: desired number of pieces.
        b: if 1, also print each candidate's badness score.
    """
    # percentage we'll check in either direction
    threshold = 0.1
    # the extra badness per piece
    penalty = 1.005
    ratio = max(w,h)/min(w,h) # switched to be greater than 1
    print("")
    print(f"{w} by {h} is picture ratio {round(ratio,4)}")
    print("")
    max_cap = int((1+threshold)*n)
    min_cap = int((1-threshold)*n)
    up_range = [i for i in range(n,max_cap+1)]
    down_range = [i for i in range(min_cap,n)] # do not want n included again
    down_range.reverse()
    # start at 100 which is silly high and then move down.
    up_best = 100
    up_best_deets = []
    down_best = 100
    down_best_deets = []
    # I am using the run marker so I know if looking above or below n
    run = 0
    for dis_range in [up_range,down_range]:
        best_n = 0
        best_n_ratio = 0
        best_n_sides = []
        if run == 0:
            print(f"Looking for >= {n} solutions:")
            print("")
        else:
            print("")
            print("Just out of interest, here are smaller options:")
            print("")
        for i in dis_range:
            this_best = 0
            # pick the factor pair of i whose ratio is closest to the picture's
            for j in low_factors(i):
                j2 = int(i/j) # must be a whole number anyway
                this_ratio = j2/j
                if this_best == 0:
                    this_best = this_ratio
                    best_sides = [j,j2]
                else:
                    if abs(this_ratio/ratio - 1) < abs(this_best/ratio - 1):
                        this_best = this_ratio
                        best_sides = [j,j2]
            # best_sides is always bound: low_factors(i) contains 1 for i >= 1
            yes = 0
            if best_n == 0:
                yes = 1
            else:
                if abs(this_best/ratio - 1) < abs(best_n_ratio/ratio - 1):
                    yes = 1
            if yes == 1:
                # i beats the running best; print it and score its badness
                best_n = i
                best_n_ratio = this_best
                best_n_sides = best_sides
                piece_ratio = max(ratio,this_best)/min(ratio,this_best)
                badness_score = (penalty**(abs(i-n)))*piece_ratio
                if run == 0:
                    if badness_score < up_best:
                        up_best = badness_score
                        up_best_deets = [best_n,best_n_sides,best_n_ratio]
                else:
                    if badness_score < down_best:
                        down_best = badness_score
                        down_best_deets = [best_n,best_n_sides,best_n_ratio]
                print(f"{best_n} pieces in {best_n_sides} (grid ratio {round(best_n_ratio,4)}) needs piece ratio {round(piece_ratio,4)}")
                if b==1:
                    print(f"[badness = {round(badness_score,5)}]")
        print(f"for {n} the best is {best_n} pieces with size {best_n_sides}")
        run += 1
    print("")
    print(f"If I had to guess: I think it's {up_best_deets[0]} pieces.")
    if down_best < up_best:
        print("")
        print(f"BUT, fun fact, {down_best_deets[0]} would be even better.")
        print("")
    return 'DONE'
# I duplicated jig_v0 to make is easier to show in the video
def jig_v0(w,h,n,b=0):
    """Stripped-down duplicate of jig() (kept separate for the video demo).

    Same +/-10% scan and scoring as jig(), but without the per-range summary
    line and the final recommendation prints. Returns 'DONE'.

    Args:
        w, h: picture width and height (only their ratio matters).
        n: desired number of pieces.
        b: if 1, also print each candidate's badness score.
    """
    # percentage we'll check in either direction
    threshold = 0.1
    penalty = 1.005
    ratio = max(w,h)/min(w,h) # switched to be greater than 1
    print("")
    print(f"{w} by {h} is picture ratio {round(ratio,4)}")
    print("")
    max_cap = int((1+threshold)*n)
    min_cap = int((1-threshold)*n)
    up_range = [i for i in range(n,max_cap+1)]
    down_range = [i for i in range(min_cap,n)] # do not want n included again
    down_range.reverse()
    # start at 100 which is silly high and then move down.
    up_best = 100
    up_best_deets = []
    down_best = 100
    down_best_deets = []
    run = 0
    for dis_range in [up_range,down_range]:
        best_n = 0
        best_n_ratio = 0
        best_n_sides = []
        if run == 0:
            print(f"Looking for >= {n} solutions:")
            print("")
        else:
            print("")
            print("Just out of interest, here are smaller options:")
            print("")
        for i in dis_range:
            this_best = 0
            # pick the factor pair of i whose ratio is closest to the picture's
            for j in low_factors(i):
                j2 = int(i/j) # must be a whole number anyway
                this_ratio = j2/j
                if this_best == 0:
                    this_best = this_ratio
                    best_sides = [j,j2]
                else:
                    if abs(this_ratio/ratio - 1) < abs(this_best/ratio - 1):
                        this_best = this_ratio
                        best_sides = [j,j2]
            yes = 0
            if best_n == 0:
                yes = 1
            else:
                if abs(this_best/ratio - 1) < abs(best_n_ratio/ratio - 1):
                    yes = 1
            if yes == 1:
                best_n = i
                best_n_ratio = this_best
                best_n_sides = best_sides
                piece_ratio = max(ratio,this_best)/min(ratio,this_best)
                badness_score = (penalty**(abs(i-n)))*piece_ratio
                if run == 0:
                    if badness_score < up_best:
                        up_best = badness_score
                        up_best_deets = [best_n,best_n_sides,best_n_ratio]
                else:
                    if badness_score < down_best:
                        down_best = badness_score
                        down_best_deets = [best_n,best_n_sides,best_n_ratio]
                print(f"{best_n} pieces in {best_n_sides} (grid ratio {round(best_n_ratio,4)}) needs piece ratio {round(piece_ratio,4)}")
                if b==1:
                    print(f"[badness = {round(badness_score,5)}]")
        run += 1
    print("")
    return 'DONE'
| 31.875
| 137
| 0.49951
| 843
| 6,120
| 3.424674
| 0.168446
| 0.062348
| 0.041566
| 0.029096
| 0.820229
| 0.820229
| 0.820229
| 0.820229
| 0.820229
| 0.820229
| 0
| 0.027566
| 0.401307
| 6,120
| 191
| 138
| 32.041885
| 0.760371
| 0.110784
| 0
| 0.916667
| 0
| 0.013889
| 0.13219
| 0.02618
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020833
| false
| 0
| 0
| 0
| 0.041667
| 0.1875
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8f69683541c0d2ae8fb633f190dba96a193f6fc5
| 148
|
py
|
Python
|
stable_baselines3/dqn/__init__.py
|
koulakis/stable-baselines3
|
08e7519381e800edc6bbd09577f14381b7341873
|
[
"MIT"
] | null | null | null |
stable_baselines3/dqn/__init__.py
|
koulakis/stable-baselines3
|
08e7519381e800edc6bbd09577f14381b7341873
|
[
"MIT"
] | null | null | null |
stable_baselines3/dqn/__init__.py
|
koulakis/stable-baselines3
|
08e7519381e800edc6bbd09577f14381b7341873
|
[
"MIT"
] | null | null | null |
from stable_baselines3.dqn.dqn import DQN
from stable_baselines3.dqn.policies import MlpPolicy
from stable_baselines3.dqn.policies import CnnPolicy
| 37
| 52
| 0.878378
| 21
| 148
| 6.047619
| 0.380952
| 0.23622
| 0.472441
| 0.543307
| 0.582677
| 0.582677
| 0
| 0
| 0
| 0
| 0
| 0.022059
| 0.081081
| 148
| 3
| 53
| 49.333333
| 0.911765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
713915d7d71c455fffc0203cbff522495c364eeb
| 24,445
|
py
|
Python
|
booking/tests.py
|
broskh/restaurants
|
665fe69c8eeaae80c9a802abe6d5ca3343ab1689
|
[
"MIT"
] | null | null | null |
booking/tests.py
|
broskh/restaurants
|
665fe69c8eeaae80c9a802abe6d5ca3343ab1689
|
[
"MIT"
] | null | null | null |
booking/tests.py
|
broskh/restaurants
|
665fe69c8eeaae80c9a802abe6d5ca3343ab1689
|
[
"MIT"
] | null | null | null |
from datetime import datetime, timedelta
from django.test import TestCase, Client
from django.urls import reverse
from django.utils import timezone
from booking.models import Booking
from restaurants.utils import get_coordinates
from user_management.models import Restaurant, User
class BookingMethodsTests(TestCase):
    """Unit tests for Booking model methods (end-time calculation)."""
    def setUp(self):
        """Create a restaurant, a client user, a restaurant user and an
        unsaved Booking starting now; geocodes the restaurant's address.

        NOTE(review): get_coordinates appears to hit a live geocoding
        service, so these tests depend on network access -- confirm.
        """
        self.restaurant = Restaurant.objects.create(
            name='da mario',
            city='vignola',
            address='via baracchini, 95',
            n_places=50,
            booking_duration=120
        )
        self.client_user = User.objects.create(
            first_name='paolo',
            last_name='verdi',
            email='paolo.verdi@mail.com',
            username='paolo1',
            user_type=User.TYPES[0][0]
        )
        self.client_user.set_password('password')
        self.restaurant_user = User.objects.create(
            first_name='mario',
            last_name='rossi',
            email='mario.rossi@mail.com',
            username='mario1',
            user_type=User.TYPES[1][0],
            restaurant_information=self.restaurant
        )
        self.restaurant_user.set_password('password')
        restaurant_position = get_coordinates(self.restaurant.city + ', ' + self.restaurant.address)
        self.restaurant.latitude = restaurant_position['lat']
        self.restaurant.longitude = restaurant_position['lng']
        self.booking = Booking(
            client=self.client_user,
            restaurant=self.restaurant,
            n_places=2,
            start_time=timezone.make_aware(datetime.now(), timezone.get_current_timezone()).replace(microsecond=0),
            state=Booking.STATES[0][0]
        )
        self.booking.end_time = self.booking.calculate_end_time()
    def test_end_is_after_start(self):
        """calculate_end_time must yield an end strictly after the start."""
        self.assertEqual(self.booking.end_time <= self.booking.start_time, False)
class IndexWiewTests(TestCase):
    """View tests for the booking index page under each login state."""
    def setUp(self):
        """Create and save a restaurant plus a client and a restaurant user."""
        self.url = reverse('booking:index')
        self.client = Client()
        self.password = 'password'
        self.restaurant = Restaurant.objects.create(
            name='da mario',
            city='vignola',
            address='via baracchini, 95',
            n_places=50,
            booking_duration=120
        )
        restaurant_position = get_coordinates(self.restaurant.city + ', ' + self.restaurant.address)
        self.restaurant.latitude = restaurant_position['lat']
        self.restaurant.longitude = restaurant_position['lng']
        self.restaurant.save()
        self.client_user = User.objects.create(
            first_name='paolo',
            last_name='verdi',
            email='paolo.verdi@mail.com',
            username='paolo1',
            user_type=User.TYPES[0][0]
        )
        self.client_user.set_password(self.password)
        self.client_user.save()
        self.restaurant_user = User.objects.create(
            first_name='mario',
            last_name='rossi',
            email='mario.rossi@mail.com',
            username='mario1',
            user_type=User.TYPES[1][0],
            restaurant_information=self.restaurant
        )
        self.restaurant_user.set_password(self.password)
        self.restaurant_user.save()
    def test_not_logged_user(self):
        """Anonymous visitors get the page without a Logout link."""
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'Logout')
    def test_client_logged(self):
        """A logged-in client sees the bookings-management section."""
        self.client.login(username=self.client_user.username, password=self.password)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Gestione prenotazioni')
    def test_restaurant_logged(self):
        """A logged-in restaurant user sees the restaurant-management section."""
        self.client.login(username=self.restaurant_user.username, password=self.password)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Gestione ristorante')
class ResultsWiewTests(TestCase):
    """View tests for the restaurant search-results page."""
    def setUp(self):
        """Build three geocoded (but unsaved) restaurants: two near the
        search site used by the tests, one far away."""
        self.url = reverse('booking:search_results')
        self.client = Client()
        self.datetime = timezone.make_aware(datetime.now(), timezone.get_current_timezone()).replace(microsecond=0)
        self.restaurant1 = Restaurant(
            name='da mario',
            city='vignola',
            address='via baracchini, 95',
            n_places=50,
            booking_duration=120
        )
        restaurant_position = get_coordinates(self.restaurant1.city + ', ' + self.restaurant1.address)
        self.restaurant1.latitude = restaurant_position['lat']
        self.restaurant1.longitude = restaurant_position['lng']
        self.restaurant2 = Restaurant(
            name='da paolo',
            city='san cesario sul panaro',
            address='via della meccanica',
            n_places=80,
            booking_duration=90
        )
        restaurant_position = get_coordinates(self.restaurant2.city + ', ' + self.restaurant2.address)
        self.restaurant2.latitude = restaurant_position['lat']
        self.restaurant2.longitude = restaurant_position['lng']
        self.restaurant3 = Restaurant(
            name='da paolo',
            city='alba adriatica',
            address='via pompeo',
            n_places=120,
            booking_duration=150
        )
        restaurant_position = get_coordinates(self.restaurant3.city + ', ' + self.restaurant3.address)
        self.restaurant3.latitude = restaurant_position['lat']
        self.restaurant3.longitude = restaurant_position['lng']
    def test_with_results(self):
        """Saved restaurants near the search site appear in the results."""
        self.restaurant1.save()
        self.restaurant2.save()
        self.restaurant3.save()
        data = {
            'site': 'savignano sul panaro',
            'date': self.datetime.strftime("%d/%m/%Y"),
            'time': self.datetime.strftime("%H:%M"),
            'n_clients': 2
        }
        response = self.client.get(self.url, data=data)
        self.assertEqual(response.status_code, 200)
        self.assertNotEqual(response.context['restaurants_available'], [])
    def test_without_results(self):
        """With no restaurants saved, both result lists are empty."""
        data = {
            'site': 'savignano sul panaro',
            'date': self.datetime.strftime("%d/%m/%Y"),
            'time': self.datetime.strftime("%H:%M"),
            'n_clients': 2
        }
        response = self.client.get(self.url, data=data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['restaurants_available'], [])
        self.assertEqual(response.context['restaurants_busy'], [])
    def test_ordered_results_and_closer_or_equal_than_50_km(self):
        """Results are ordered by distance and capped at 50 km (the distant
        third restaurant is excluded)."""
        self.restaurant1.save()
        self.restaurant2.save()
        self.restaurant3.save()
        data = {
            'site': 'savignano sul panaro',
            'date': self.datetime.strftime("%d/%m/%Y"),
            'time': self.datetime.strftime("%H:%M"),
            'n_clients': 2
        }
        response = self.client.get(self.url, data=data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['restaurants_available'][0]['restaurant'], self.restaurant1)
        self.assertEqual(response.context['restaurants_available'][1]['restaurant'], self.restaurant2)
        self.assertEqual(len(response.context['restaurants_available']), 2)
class RestaurantBookingsWiewTests(TestCase):
    """View tests for the restaurant bookings page (restaurant users only)."""
    def setUp(self):
        """Create and save a restaurant plus a client and a restaurant user."""
        self.url = reverse('booking:restaurant_bookings')
        self.client = Client()
        self.password = 'password'
        self.restaurant = Restaurant.objects.create(
            name='da mario',
            city='vignola',
            address='via baracchini, 95',
            n_places=50,
            booking_duration=120
        )
        restaurant_position = get_coordinates(self.restaurant.city + ', ' + self.restaurant.address)
        self.restaurant.latitude = restaurant_position['lat']
        self.restaurant.longitude = restaurant_position['lng']
        self.restaurant.save()
        self.client_user = User.objects.create(
            first_name='paolo',
            last_name='verdi',
            email='paolo.verdi@mail.com',
            username='paolo1',
            user_type=User.TYPES[0][0]
        )
        self.client_user.set_password(self.password)
        self.client_user.save()
        self.restaurant_user = User.objects.create(
            first_name='mario',
            last_name='rossi',
            email='mario.rossi@mail.com',
            username='mario1',
            user_type=User.TYPES[1][0],
            restaurant_information=self.restaurant
        )
        self.restaurant_user.set_password(self.password)
        self.restaurant_user.save()
    def test_not_logged_user(self):
        """Anonymous visitors are redirected to the login page."""
        response = self.client.get(self.url)
        expected_url = reverse('login') + '?next=' + self.url
        self.assertRedirects(response, expected_url, status_code=302, target_status_code=200)
    def test_client_logged(self):
        """Client users are also redirected to login (page is restaurant-only)."""
        self.client.login(username=self.client_user.username, password=self.password)
        response = self.client.get(self.url)
        expected_url = reverse('login') + '?next=' + self.url
        self.assertRedirects(response, expected_url, status_code=302, target_status_code=200)
    def test_restaurant_logged_with_restaurant_information(self):
        """A restaurant user with restaurant_information set gets the page."""
        self.restaurant_user.restaurant_information = self.restaurant
        self.restaurant_user.save()
        self.client.login(username=self.restaurant_user.username, password=self.password)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
class ClientBookingsWiewTests(TestCase):
    """View tests for the client bookings list (client users only)."""
    def setUp(self):
        """Create a saved restaurant, both user types, and an unsaved
        Booking starting now that individual tests save when needed."""
        self.url = reverse('booking:client_bookings')
        self.client = Client()
        self.password = 'password'
        self.datetime = timezone.make_aware(datetime.now(), timezone.get_current_timezone()).replace(microsecond=0)
        self.restaurant = Restaurant.objects.create(
            name='da mario',
            city='vignola',
            address='via baracchini, 95',
            n_places=50,
            booking_duration=120
        )
        restaurant_position = get_coordinates(self.restaurant.city + ', ' + self.restaurant.address)
        self.restaurant.latitude = restaurant_position['lat']
        self.restaurant.longitude = restaurant_position['lng']
        self.restaurant.save()
        self.client_user = User.objects.create(
            first_name='paolo',
            last_name='verdi',
            email='paolo.verdi@mail.com',
            username='paolo1',
            user_type=User.TYPES[0][0]
        )
        self.client_user.set_password(self.password)
        self.client_user.save()
        self.restaurant_user = User.objects.create(
            first_name='mario',
            last_name='rossi',
            email='mario.rossi@mail.com',
            username='mario1',
            user_type=User.TYPES[1][0],
            restaurant_information=self.restaurant
        )
        self.restaurant_user.set_password(self.password)
        self.restaurant_user.save()
        self.booking = Booking(
            client=self.client_user,
            restaurant=self.restaurant,
            start_time=self.datetime,
            n_places=2,
            state=Booking.STATES[1][0]
        )
        self.booking.end_time = self.booking.calculate_end_time()
    def test_not_logged_user(self):
        """Anonymous visitors are redirected to the login page."""
        response = self.client.get(self.url)
        expected_url = reverse('login') + '?next=' + self.url
        self.assertRedirects(response, expected_url, status_code=302, target_status_code=200)
    def test_restaurant_logged(self):
        """Restaurant users are redirected to login (page is client-only)."""
        # NOTE(review): assigning a Restaurant to client_information looks
        # like a copy-paste from restaurant_information -- confirm intended.
        self.restaurant_user.client_information = self.restaurant
        self.restaurant_user.save()
        self.client.login(username=self.restaurant_user.username, password=self.password)
        response = self.client.get(self.url)
        expected_url = reverse('login') + '?next=' + self.url
        self.assertRedirects(response, expected_url, status_code=302, target_status_code=200)
    def test_client_logged_without_results(self):
        """With no saved bookings, the client sees an empty list."""
        self.client.login(username=self.client_user.username, password=self.password)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertQuerysetEqual(response.context['booking_list'], [])
    def test_client_logged_with_results(self):
        """A saved booking shows up in the client's booking list."""
        self.booking.save()
        self.client.login(username=self.client_user.username, password=self.password)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['booking_list'][0], self.booking)
    def test_booking_before_now(self):
        """Bookings that start in the past are excluded from the list."""
        self.booking.start_time = self.datetime - timedelta(minutes=1)
        self.booking.save()
        self.client.login(username=self.client_user.username, password=self.password)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertQuerysetEqual(response.context['booking_list'], [])
class DeleteBookingsWiewTests(TestCase):
    """View tests for the AJAX booking-deletion endpoint."""
    def setUp(self):
        """Create a saved restaurant, both user types, and an unsaved
        Booking that individual tests save when needed."""
        self.url = reverse('booking:delete_booking')
        self.client = Client()
        self.password = 'password'
        self.datetime = timezone.make_aware(datetime.now(), timezone.get_current_timezone()).replace(microsecond=0)
        self.restaurant = Restaurant.objects.create(
            name='da mario',
            city='vignola',
            address='via baracchini, 95',
            n_places=50,
            booking_duration=120
        )
        restaurant_position = get_coordinates(self.restaurant.city + ', ' + self.restaurant.address)
        self.restaurant.latitude = restaurant_position['lat']
        self.restaurant.longitude = restaurant_position['lng']
        self.restaurant.save()
        self.client_user = User.objects.create(
            first_name='paolo',
            last_name='verdi',
            email='paolo.verdi@mail.com',
            username='paolo1',
            user_type=User.TYPES[0][0]
        )
        self.client_user.set_password(self.password)
        self.client_user.save()
        self.restaurant_user = User.objects.create(
            first_name='mario',
            last_name='rossi',
            email='mario.rossi@mail.com',
            username='mario1',
            user_type=User.TYPES[1][0],
            restaurant_information=self.restaurant
        )
        self.restaurant_user.set_password(self.password)
        self.restaurant_user.save()
        self.booking = Booking(
            client=self.client_user,
            restaurant=self.restaurant,
            start_time=self.datetime,
            n_places=2,
            state=Booking.STATES[1][0]
        )
        self.booking.end_time = self.booking.calculate_end_time()
    def test_not_logged_user(self):
        """Anonymous visitors are redirected to the login page."""
        response = self.client.get(self.url)
        expected_url = reverse('login') + '?next=' + self.url
        self.assertRedirects(response, expected_url, status_code=302, target_status_code=200)
    def test_restaurant_logged(self):
        """Restaurant users are redirected to login (endpoint is client-only)."""
        self.client.login(username=self.restaurant_user.username, password=self.password)
        response = self.client.get(self.url)
        expected_url = reverse('login') + '?next=' + self.url
        self.assertRedirects(response, expected_url, status_code=302, target_status_code=200)
    def test_client_logged_ajax_call(self):
        """An AJAX POST from a logged-in client is accepted."""
        self.client.login(username=self.client_user.username, password=self.password)
        response = self.client.post(self.url, {}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(response.status_code, 200)
    def test_client_logged_no_ajax_call(self):
        """A non-AJAX POST is rejected with 404."""
        self.client.login(username=self.client_user.username, password=self.password)
        response = self.client.post(self.url, {})
        self.assertEqual(response.status_code, 404)
    def test_client_logged_ajax_call_with_booking(self):
        """Deleting an existing booking returns a JSON success result."""
        self.booking.save()
        self.client.login(username=self.client_user.username, password=self.password)
        data = {
            'id': self.booking.id
        }
        data_result = {
            'result': 'success'
        }
        response = self.client.post(self.url, data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(response.status_code, 200)
        self.assertJSONEqual(str(response.content, encoding='utf8'), data_result)
    def test_client_logged_ajax_call_without_booking(self):
        """Deleting a nonexistent booking id returns a JSON error result."""
        self.client.login(username=self.client_user.username, password=self.password)
        data = {
            'id': 0
        }
        data_result = {
            'result': 'error'
        }
        response = self.client.post(self.url, data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(response.status_code, 200)
        self.assertJSONEqual(str(response.content, encoding='utf8'), data_result)
class EditBookingsWiewTests(TestCase):
    """View tests for the AJAX booking-edit endpoint."""
    def setUp(self):
        """Create a saved restaurant, both user types, and an unsaved
        Booking that individual tests save when needed."""
        self.url = reverse('booking:edit_booking')
        self.client = Client()
        self.password = 'password'
        self.datetime = timezone.make_aware(datetime.now(), timezone.get_current_timezone()).replace(microsecond=0)
        self.restaurant = Restaurant.objects.create(
            name='da mario',
            city='vignola',
            address='via baracchini, 95',
            n_places=50,
            booking_duration=120
        )
        restaurant_position = get_coordinates(self.restaurant.city + ', ' + self.restaurant.address)
        self.restaurant.latitude = restaurant_position['lat']
        self.restaurant.longitude = restaurant_position['lng']
        self.restaurant.save()
        self.client_user = User.objects.create(
            first_name='paolo',
            last_name='verdi',
            email='paolo.verdi@mail.com',
            username='paolo1',
            user_type=User.TYPES[0][0]
        )
        self.client_user.set_password(self.password)
        self.client_user.save()
        self.restaurant_user = User.objects.create(
            first_name='mario',
            last_name='rossi',
            email='mario.rossi@mail.com',
            username='mario1',
            user_type=User.TYPES[1][0],
            restaurant_information=self.restaurant
        )
        self.restaurant_user.set_password(self.password)
        self.restaurant_user.save()
        self.booking = Booking(
            client=self.client_user,
            restaurant=self.restaurant,
            start_time=self.datetime,
            n_places=2,
            state=Booking.STATES[1][0]
        )
        self.booking.end_time = self.booking.calculate_end_time()
    def test_not_logged_user(self):
        """Anonymous visitors are redirected to the login page."""
        response = self.client.get(self.url)
        expected_url = reverse('login') + '?next=' + self.url
        self.assertRedirects(response, expected_url, status_code=302, target_status_code=200)
    def test_restaurant_logged(self):
        """Restaurant users are redirected to login (endpoint is client-only)."""
        self.client.login(username=self.restaurant_user.username, password=self.password)
        response = self.client.get(self.url)
        expected_url = reverse('login') + '?next=' + self.url
        self.assertRedirects(response, expected_url, status_code=302, target_status_code=200)
    def test_client_logged_ajax_call(self):
        """An AJAX POST from a logged-in client is accepted."""
        self.client.login(username=self.client_user.username, password=self.password)
        response = self.client.post(self.url, {}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(response.status_code, 200)
    def test_client_logged_no_ajax_call(self):
        """A non-AJAX POST is rejected with 404."""
        self.client.login(username=self.client_user.username, password=self.password)
        response = self.client.post(self.url, {})
        self.assertEqual(response.status_code, 404)
    def test_client_logged_ajax_call_with_booking(self):
        """Editing an existing booking returns a JSON success result."""
        self.booking.save()
        self.client.login(username=self.client_user.username, password=self.password)
        data = {
            'id': self.booking.id,
            'n_places': 10,
            'start_time': (self.datetime + timedelta(minutes=30)).strftime("%Y-%m-%d-%H-%M-%S"),
            'state': Booking.STATES[0][0]
        }
        data_result = {
            'result': 'success'
        }
        response = self.client.post(self.url, data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(response.status_code, 200)
        self.assertJSONEqual(str(response.content, encoding='utf8'), data_result)
    def test_client_logged_ajax_call_without_booking(self):
        """Editing a nonexistent booking id returns a JSON error result."""
        self.client.login(username=self.client_user.username, password=self.password)
        data = {
            'id': 0,
            'n_places': 10,
            'start_time': (self.datetime + timedelta(minutes=30)).strftime("%Y-%m-%d-%H-%M-%S"),
            'state': Booking.STATES[0][0]
        }
        data_result = {
            'result': 'error'
        }
        response = self.client.post(self.url, data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(response.status_code, 200)
        self.assertJSONEqual(str(response.content, encoding='utf8'), data_result)
class CheckAvailabilityWiewTests(TestCase):
    """View tests for the AJAX availability-check endpoint."""
    def setUp(self):
        """Create a saved restaurant, both user types, and an unsaved
        Booking that individual tests save when needed."""
        self.url = reverse('booking:check_availability')
        self.client = Client()
        self.password = 'password'
        self.datetime = timezone.make_aware(datetime.now(), timezone.get_current_timezone()).replace(microsecond=0)
        self.restaurant = Restaurant.objects.create(
            name='da mario',
            city='vignola',
            address='via baracchini, 95',
            n_places=50,
            booking_duration=120
        )
        restaurant_position = get_coordinates(self.restaurant.city + ', ' + self.restaurant.address)
        self.restaurant.latitude = restaurant_position['lat']
        self.restaurant.longitude = restaurant_position['lng']
        self.restaurant.save()
        self.client_user = User.objects.create(
            first_name='paolo',
            last_name='verdi',
            email='paolo.verdi@mail.com',
            username='paolo1',
            user_type=User.TYPES[0][0]
        )
        self.client_user.set_password(self.password)
        self.client_user.save()
        self.restaurant_user = User.objects.create(
            first_name='mario',
            last_name='rossi',
            email='mario.rossi@mail.com',
            username='mario1',
            user_type=User.TYPES[1][0],
            restaurant_information=self.restaurant
        )
        self.restaurant_user.set_password(self.password)
        self.restaurant_user.save()
        self.booking = Booking(
            client=self.client_user,
            restaurant=self.restaurant,
            start_time=self.datetime,
            n_places=2,
            state=Booking.STATES[1][0]
        )
        self.booking.end_time = self.booking.calculate_end_time()
    def test_client_logged_ajax_call(self):
        """An AJAX POST is accepted.

        NOTE(review): no login is performed despite the method name -- the
        endpoint appears to be public; confirm naming vs. intent.
        """
        response = self.client.post(self.url, {}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(response.status_code, 200)
    def test_client_logged_no_ajax_call(self):
        """A non-AJAX POST is rejected with 404."""
        response = self.client.post(self.url, {})
        self.assertEqual(response.status_code, 404)
    def test_client_logged_ajax_call_with_data(self):
        """A valid restaurant id yields a JSON success with a state value."""
        self.booking.save()
        self.client.login(username=self.client_user.username, password=self.password)
        data = {
            'restaurant_id': self.restaurant.id,
            'n_places': 10,
            'start_time': self.datetime.strftime("%Y-%m-%d-%H-%M-%S")
        }
        data_result = {
            'result': 'success',
            'state': Booking.STATES[1][0]
        }
        response = self.client.post(self.url, data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(response.status_code, 200)
        self.assertJSONEqual(str(response.content, encoding='utf8'), data_result)
    def test_client_logged_ajax_call_without_data(self):
        """A nonexistent restaurant id yields a JSON error result."""
        data = {
            'restaurant_id': 0,
            'n_places': 10,
            'start_time': self.datetime.strftime("%Y-%m-%d-%H-%M-%S")
        }
        data_result = {
            'result': 'error'
        }
        response = self.client.post(self.url, data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(response.status_code, 200)
        self.assertJSONEqual(str(response.content, encoding='utf8'), data_result)
| 39.555016
| 115
| 0.633381
| 2,705
| 24,445
| 5.54159
| 0.066543
| 0.063376
| 0.036424
| 0.042562
| 0.898866
| 0.872915
| 0.865177
| 0.863709
| 0.843362
| 0.842362
| 0
| 0.016499
| 0.248722
| 24,445
| 617
| 116
| 39.619125
| 0.799728
| 0
| 0
| 0.775322
| 0
| 0
| 0.083044
| 0.009204
| 0
| 0
| 0
| 0
| 0.090239
| 1
| 0.071823
| false
| 0.071823
| 0.012891
| 0
| 0.099448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
85417f819edd7802b362c96f8f5a411bc62c7664
| 11,364
|
py
|
Python
|
app/tests/unit/modules/entity/test_request_entity.py
|
Clivern/Kevin
|
dfa6fe99d2599a3f1a9da7c9690e2fba6a825f1d
|
[
"Apache-2.0"
] | 2
|
2018-06-18T09:37:36.000Z
|
2021-06-23T02:09:41.000Z
|
app/tests/unit/modules/entity/test_request_entity.py
|
Clivern/Kevin
|
dfa6fe99d2599a3f1a9da7c9690e2fba6a825f1d
|
[
"Apache-2.0"
] | 45
|
2018-04-08T11:53:05.000Z
|
2018-06-12T20:45:38.000Z
|
app/tests/unit/modules/entity/test_request_entity.py
|
Clivern/Kevin
|
dfa6fe99d2599a3f1a9da7c9690e2fba6a825f1d
|
[
"Apache-2.0"
] | null | null | null |
"""
Request Entity Test Cases
"""
from django.test import TestCase
from pprint import pprint
from app.modules.entity.namespace_entity import Namespace_Entity
from app.modules.entity.endpoint_entity import Endpoint_Entity
from app.modules.entity.request_entity import Request_Entity
from django.contrib.auth.models import User
class Test_Request_Entity(TestCase):
    """Test cases for the Request_Entity data access layer.

    Every test builds the same fixture chain (user -> namespace -> endpoint)
    via ``_setup_fixtures`` and then exercises one Request_Entity operation.
    """

    def _endpoint_data(self, namespace_id):
        """Return the payload for a debug GET endpoint under ``namespace_id``."""
        return {
            "route": "/",
            "method": Endpoint_Entity.GET,
            "target": Endpoint_Entity.DEBUG,
            "route_rules": "{}",
            "headers_rules": "{}",
            "body_rules": "{}",
            "namespace_id": namespace_id
        }

    def _request_data(self, endpoint_id, uri="/", method=None):
        """Return the payload for a debug request on ``endpoint_id``."""
        return {
            "uri": uri,
            "method": Request_Entity.GET if method is None else method,
            "headers": "{}",
            "body": "{}",
            "status": Request_Entity.DEBUG,
            "endpoint_id": endpoint_id
        }

    def _setup_fixtures(self):
        """Create and persist a user, a namespace and an endpoint.

        Returns:
            (request_entity, endpoint): a fresh Request_Entity instance and
            the persisted endpoint that requests should be attached to.
        """
        user = User(
            first_name="Joe",
            last_name="Doe",
            username="joe",
            email="joe@kevin.com",
            password="joe_doe"
        )
        user.save()
        namespace_entity = Namespace_Entity()
        endpoint_entity = Endpoint_Entity()
        request_entity = Request_Entity()
        namespace = namespace_entity.insert_one({
            "name": "kevin",
            "is_public": True,
            "user_id": user.pk
        })
        endpoint = endpoint_entity.insert_one(self._endpoint_data(namespace.id))
        self.assertTrue(namespace)
        self.assertTrue(namespace.id > 0)
        # A second endpoint insert mirrors the original fixture setup and
        # verifies insert_one reports success.
        endpoint_entity = Endpoint_Entity()
        self.assertTrue(endpoint_entity.insert_one(self._endpoint_data(namespace.id)))
        return request_entity, endpoint

    def test_insert_one(self):
        """insert_one persists a request and assigns a primary key."""
        request_entity, endpoint = self._setup_fixtures()
        request = request_entity.insert_one(self._request_data(endpoint.id))
        self.assertTrue(request)
        self.assertTrue(request.id > 0)

    def test_insert_many(self):
        """insert_many persists a batch of requests."""
        request_entity, endpoint = self._setup_fixtures()
        request = request_entity.insert_many([
            self._request_data(endpoint.id, "/", Request_Entity.GET),
            self._request_data(endpoint.id, "/", Request_Entity.POST),
            self._request_data(endpoint.id, "/{id}", Request_Entity.GET),
            self._request_data(endpoint.id, "/{id}", Request_Entity.PUT)
        ])
        self.assertTrue(request)

    def test_get_one_by_id(self):
        """get_one_by_id returns the stored request fields."""
        request_entity, endpoint = self._setup_fixtures()
        request = request_entity.insert_one(self._request_data(endpoint.id))
        self.assertTrue(request)
        self.assertTrue(request.id > 0)
        request = request_entity.get_one_by_id(request.id)
        self.assertEqual("get/debug", request.method + request.uri + request.status)

    def test_get_many_by_endpoint(self):
        """get_many_by_endpoint returns every request of an endpoint."""
        request_entity, endpoint = self._setup_fixtures()
        request_entity.insert_many([
            self._request_data(endpoint.id, "/", Request_Entity.GET),
            self._request_data(endpoint.id, "/", Request_Entity.POST),
            self._request_data(endpoint.id, "/{id}", Request_Entity.GET),
            self._request_data(endpoint.id, "/{id}", Request_Entity.PUT)
        ])
        # NOTE(review): "create_at" looks like a typo for "created_at" but it
        # is the ordering key the entity API is currently called with -- confirm
        # against Request_Entity before changing.
        self.assertEqual(request_entity.get_many_by_endpoint(endpoint.id, "create_at", True).count(), 4)

    def test_update_one_by_id(self):
        """update_one_by_id changes stored fields in place."""
        request_entity, endpoint = self._setup_fixtures()
        request = request_entity.insert_one(self._request_data(endpoint.id))
        self.assertTrue(request)
        self.assertTrue(request.id > 0)
        self.assertTrue(request_entity.update_one_by_id(request.id, {"uri": "/new"}))
        request = request_entity.get_one_by_id(request.id)
        self.assertEqual("get/newdebug", request.method + request.uri + request.status)

    def test_delete_one_by_id(self):
        """delete_one_by_id removes a request and fails on unknown ids."""
        request_entity, endpoint = self._setup_fixtures()
        request = request_entity.insert_one(self._request_data(endpoint.id))
        self.assertTrue(request)
        self.assertTrue(request.id > 0)
        self.assertTrue(request_entity.delete_one_by_id(request.id))
        self.assertFalse(request_entity.delete_one_by_id(1000))
| 36.423077
| 145
| 0.529391
| 1,059
| 11,364
| 5.409821
| 0.061379
| 0.15151
| 0.057602
| 0.058649
| 0.923721
| 0.911852
| 0.902775
| 0.902775
| 0.887764
| 0.887764
| 0
| 0.001945
| 0.321454
| 11,364
| 312
| 146
| 36.423077
| 0.741019
| 0.0022
| 0
| 0.915541
| 0
| 0
| 0.151871
| 0
| 0
| 0
| 0
| 0
| 0.111486
| 1
| 0.02027
| false
| 0.02027
| 0.02027
| 0
| 0.043919
| 0.003378
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
859383108cc9e4ced082f12dfb83fe29d05bf98c
| 12,668
|
py
|
Python
|
lib/ult/Generate_HICO_detection.py
|
abreza/HOI-CL
|
c5be517bb26eac73ef88a39d6ec9e564c3379714
|
[
"MIT"
] | 40
|
2021-04-09T17:53:08.000Z
|
2022-03-30T02:38:10.000Z
|
lib/ult/Generate_HICO_detection.py
|
abreza/HOI-CL
|
c5be517bb26eac73ef88a39d6ec9e564c3379714
|
[
"MIT"
] | 21
|
2021-04-09T19:05:47.000Z
|
2022-01-31T23:17:16.000Z
|
lib/ult/Generate_HICO_detection.py
|
abreza/HOI-CL
|
c5be517bb26eac73ef88a39d6ec9e564c3379714
|
[
"MIT"
] | 8
|
2021-05-30T12:37:00.000Z
|
2022-03-14T03:13:57.000Z
|
# --------------------------------------------------------
# Tensorflow VCL
# Licensed under The MIT License [see LICENSE for details]
# Written by Zhi Hou
# --------------------------------------------------------
"""
Change the HICO-DET detection results to the right format.
"""
import pickle
import numpy as np
import scipy.io as sio
import os
# HICO = None
from ult.tools import get_convert_matrix
def save_HICO(HICO, HICO_dir, classid, begin, finish, fuse_type='spho'):
    """Dump HICO-DET detections of one object class to a .mat file.

    Args:
        HICO: dict mapping image id (string) -> list of detection elements.
            Index layout used here: [0] human box (array), [1] object box
            (array), [2] HOI class id, [4] human score, [5] object score;
            the remaining slots are consumed by ``obtain_fuse_preds``.
        HICO_dir: output directory prefix (string concatenated with the
            file name, so it must end with a path separator).
        classid: object class id (1-80) whose detections are saved.
        begin, finish: 1-based inclusive range of HOI action ids belonging
            to this object class.
        fuse_type: score fusion strategy, see ``obtain_fuse_preds``.

    Returns:
        The list of boxes written to the .mat file.
    """
    # The classid filter and the fused predictions do not depend on the
    # action index, so compute them once instead of on every iteration.
    matches = []
    for key, value in HICO.items():
        for element in value:
            if element[2] == classid:
                matches.append((key, element, obtain_fuse_preds(element, fuse_type)))
    all_boxes = []
    for i in range(finish - begin + 1):
        total = []
        score = []
        for key, element, preds in matches:
            temp = []
            temp.append(element[0].tolist())  # Human box
            temp.append(element[1].tolist())  # Object box
            temp.append(int(key))  # image id
            temp.append(int(i))  # action id, relative to this class's range
            final = preds[begin - 1 + i] * element[4] * element[5]
            temp.append(final)
            total.append(temp)
            score.append(final)
        # Keep at most 19999 detections per action, best scores first.
        idx = np.argsort(score, axis=0)[::-1]
        for i_idx in range(min(len(idx), 19999)):
            all_boxes.append(total[idx[i_idx]])
    savefile = HICO_dir + 'detections_' + str(classid).zfill(2) + '.mat'
    sio.savemat(savefile, {'all_boxes': all_boxes})
    return all_boxes
# Conversion matrices between verb/object ids and the 600 HOI classes.
verb_to_HO_matrix, obj_to_HO_matrix = get_convert_matrix()
# Map each HOI class id (0-599) to the object id (0-79) it involves;
# if several objects matched, the last one would win, as in a plain loop.
hoi_2_obj = {
    hoi: obj
    for hoi in range(600)
    for obj in range(80)
    if obj_to_HO_matrix[obj][hoi] > 0
}
def obtain_fuse_preds(element, fuse_type):
    """Fuse the branch scores of one detection element.

    Args:
        element: detection record; index 3 holds the raw predictions and
            indices 6-9 hold the human (pH), object (pO), spatial (pSp)
            and verb/HOI (pHoi) branch scores.
        fuse_type: fusion strategy -- one of 'preds' (raw predictions),
            'spho' (pSp * (pO + pH)), 'ho' (pO + pH), 'spv' (pSp * pHoi),
            'sp' (pSp) or 'v' (pHoi).

    Returns:
        The fused prediction scores.

    Raises:
        Exception: if ``fuse_type`` is not a known fusion strategy.
    """
    if fuse_type == 'preds':
        return element[3]
    pH = element[6]
    pO = element[7]
    pSp = element[8]
    pHoi = element[9]
    if fuse_type == 'spho':
        return pSp * (pO + pH)
    if fuse_type == 'ho':
        return pO + pH
    if fuse_type == 'spv':
        return pSp * pHoi
    if fuse_type == 'sp':
        return pSp
    if fuse_type == 'v':
        return pHoi
    # The message now lists the strategies that are actually implemented.
    raise Exception('fuse_type error, you must select one of {preds, spho, ho, spv, sp, v}')
def save_HICO3(HICO, HICO_dir, classid, begin, finish, fuse_type='spho'):
    """Dump HICO-DET detections of one object class to a .mat file, keyed
    by the detector's own object id instead of the HOI class id.

    "spho" is from iCAN which includes three branches: sp, v, o.

    Differences from ``save_HICO``: the element is matched on its detected
    object id (element[13], 0-based) rather than element[2], and its object
    score slot (element[5]) is overwritten with the detector's object score
    (element[12]) as a side effect; for fuse_type 'spv' the pre-fused
    predictions in element[11] are used directly.

    Returns:
        The list of boxes written to the .mat file.
    """
    # Per-element work (fusion, id shift, score overwrite) does not depend
    # on the action index, so do it once up front. The element[5] overwrite
    # is applied to every element, matching the original behaviour.
    matches = []
    for key, value in HICO.items():
        for element in value:
            if fuse_type == 'spv':
                preds = element[11]
            else:
                preds = obtain_fuse_preds(element, fuse_type)
            element[5] = element[12]  # replace object score with detector score
            # Detector object ids are 0-based; classid is 1-based.
            if element[13] + 1 == classid:
                matches.append((key, element, preds))
    all_boxes = []
    for i in range(finish - begin + 1):
        total = []
        score = []
        for key, element, preds in matches:
            temp = []
            temp.append(element[0].tolist())  # Human box
            temp.append(element[1].tolist())  # Object box
            temp.append(int(key))  # image id
            temp.append(int(i))  # action id, relative to this class's range
            final = preds[begin - 1 + i] * element[4] * element[5]
            temp.append(final)
            total.append(temp)
            score.append(final)
        # Keep at most 19999 detections per action, best scores first.
        idx = np.argsort(score, axis=0)[::-1]
        for i_idx in range(min(len(idx), 19999)):
            all_boxes.append(total[idx[i_idx]])
    savefile = HICO_dir + 'detections_' + str(classid).zfill(2) + '.mat'
    sio.savemat(savefile, {'all_boxes': all_boxes})
    return all_boxes
def Generate_HICO_detection3(HICO, HICO_dir, fuse_type, gpool, func_type = 0):
    """Write per-object-class .mat detection files for HICO-DET evaluation.

    Args:
        HICO: in-memory detection dict as consumed by ``save_HICO3``.
        HICO_dir: output directory; created if missing, emptied otherwise.
        fuse_type: score fusion strategy, see ``obtain_fuse_preds``.
        gpool: unused; kept for caller compatibility.
        func_type: unused; kept for caller compatibility.
    """
    import datetime
    if not os.path.exists(HICO_dir):
        os.makedirs(HICO_dir)
    # Remove previous results so stale classes do not leak into evaluation.
    for f in os.listdir(HICO_dir):
        os.remove(os.path.join(HICO_dir, f))
    # [object class id, first HOI id, last HOI id]; HOI ids are 1-based.
    params = [
        [1, 161, 170],   # 1 person
        [2, 11, 24],     # 2 bicycle
        [3, 66, 76],     # 3 car
        [4, 147, 160],   # 4 motorcycle
        [5, 1, 10],      # 5 airplane
        [6, 55, 65],     # 6 bus
        [7, 187, 194],   # 7 train
        [8, 568, 576],   # 8 truck
        [9, 32, 46],     # 9 boat
        [10, 563, 567],  # 10 traffic light
        [11, 326, 330],  # 11 fire_hydrant
        [12, 503, 506],  # 12 stop_sign
        [13, 415, 418],  # 13 parking_meter
        [14, 244, 247],  # 14 bench
        [15, 25, 31],    # 15 bird
        [16, 77, 86],    # 16 cat
        [17, 112, 129],  # 17 dog
        [18, 130, 146],  # 18 horse
        [19, 175, 186],  # 19 sheep
        [20, 97, 107],   # 20 cow
        [21, 314, 325],  # 21 elephant
        [22, 236, 239],  # 22 bear
        [23, 596, 600],  # 23 zebra
        [24, 343, 348],  # 24 giraffe
        [25, 209, 214],  # 25 backpack
        [26, 577, 584],  # 26 umbrella
        [27, 353, 356],  # 27 handbag
        [28, 539, 546],  # 28 tie
        [29, 507, 516],  # 29 suitcase
        [30, 337, 342],  # 30 Frisbee
        [31, 464, 474],  # 31 skis
        [32, 475, 483],  # 32 snowboard
        [33, 489, 502],  # 33 sports_ball
        [34, 369, 376],  # 34 kite
        [35, 225, 232],  # 35 baseball_bat
        [36, 233, 235],  # 36 baseball_glove
        [37, 454, 463],  # 37 skateboard
        [38, 517, 528],  # 38 surfboard
        [39, 534, 538],  # 39 tennis_racket
        [40, 47, 54],    # 40 bottle
        [41, 589, 595],  # 41 wine_glass
        [42, 296, 305],  # 42 cup
        [43, 331, 336],  # 43 fork
        [44, 377, 383],  # 44 knife
        [45, 484, 488],  # 45 spoon
        [46, 253, 257],  # 46 bowl
        [47, 215, 224],  # 47 banana
        [48, 199, 208],  # 48 apple
        [49, 439, 445],  # 49 sandwich
        [50, 398, 407],  # 50 orange
        [51, 258, 264],  # 51 broccoli
        [52, 274, 283],  # 52 carrot
        [53, 357, 363],  # 53 hot_dog
        [54, 419, 429],  # 54 pizza
        [55, 306, 313],  # 55 donut
        [56, 265, 273],  # 56 cake
        [57, 87, 92],    # 57 chair
        [58, 93, 96],    # 58 couch
        [59, 171, 174],  # 59 potted_plant
        [60, 240, 243],  # 60 bed
        [61, 108, 111],  # 61 dining_table
        [62, 551, 558],  # 62 toilet
        [63, 195, 198],  # 63 TV
        [64, 384, 389],  # 64 laptop
        [65, 394, 397],  # 65 mouse
        [66, 435, 438],  # 66 remote
        [67, 364, 368],  # 67 keyboard
        [68, 284, 290],  # 68 cell_phone
        [69, 390, 393],  # 69 microwave
        [70, 408, 414],  # 70 oven
        [71, 547, 550],  # 71 toaster
        [72, 450, 453],  # 72 sink
        [73, 430, 434],  # 73 refrigerator
        [74, 248, 252],  # 74 book
        [75, 291, 295],  # 75 clock
        [76, 585, 588],  # 76 vase
        [77, 446, 449],  # 77 scissors
        [78, 529, 533],  # 78 teddy_bear
        [79, 349, 352],  # 79 hair_drier
        [80, 559, 562],  # 80 toothbrush
    ]
    print('Load HICO sucessfully', datetime.datetime.now())
    for p in params:
        save_HICO3(HICO, HICO_dir, p[0], p[1], p[2], fuse_type)
    print("Finish save HICO", datetime.datetime.now())
def Generate_HICO_detection(output_file, HICO_dir, fuse_type, gpool):
    """Load pickled detections and write per-object-class .mat files.

    Args:
        output_file: path to the pickled HICO detection dict.
        HICO_dir: output directory; created if missing, emptied otherwise.
        fuse_type: score fusion strategy, see ``obtain_fuse_preds``.
        gpool: unused; kept for caller compatibility.
    """
    import datetime
    from sys import version_info
    if not os.path.exists(HICO_dir):
        os.makedirs(HICO_dir)
    # Remove previous results so stale classes do not leak into evaluation.
    for f in os.listdir(HICO_dir):
        os.remove(os.path.join(HICO_dir, f))
    # [object class id, first HOI id, last HOI id]; HOI ids are 1-based.
    params = [
        [1, 161, 170],   # 1 person
        [2, 11, 24],     # 2 bicycle
        [3, 66, 76],     # 3 car
        [4, 147, 160],   # 4 motorcycle
        [5, 1, 10],      # 5 airplane
        [6, 55, 65],     # 6 bus
        [7, 187, 194],   # 7 train
        [8, 568, 576],   # 8 truck
        [9, 32, 46],     # 9 boat
        [10, 563, 567],  # 10 traffic light
        [11, 326, 330],  # 11 fire_hydrant
        [12, 503, 506],  # 12 stop_sign
        [13, 415, 418],  # 13 parking_meter
        [14, 244, 247],  # 14 bench
        [15, 25, 31],    # 15 bird
        [16, 77, 86],    # 16 cat
        [17, 112, 129],  # 17 dog
        [18, 130, 146],  # 18 horse
        [19, 175, 186],  # 19 sheep
        [20, 97, 107],   # 20 cow
        [21, 314, 325],  # 21 elephant
        [22, 236, 239],  # 22 bear
        [23, 596, 600],  # 23 zebra
        [24, 343, 348],  # 24 giraffe
        [25, 209, 214],  # 25 backpack
        [26, 577, 584],  # 26 umbrella
        [27, 353, 356],  # 27 handbag
        [28, 539, 546],  # 28 tie
        [29, 507, 516],  # 29 suitcase
        [30, 337, 342],  # 30 Frisbee
        [31, 464, 474],  # 31 skis
        [32, 475, 483],  # 32 snowboard
        [33, 489, 502],  # 33 sports_ball
        [34, 369, 376],  # 34 kite
        [35, 225, 232],  # 35 baseball_bat
        [36, 233, 235],  # 36 baseball_glove
        [37, 454, 463],  # 37 skateboard
        [38, 517, 528],  # 38 surfboard
        [39, 534, 538],  # 39 tennis_racket
        [40, 47, 54],    # 40 bottle
        [41, 589, 595],  # 41 wine_glass
        [42, 296, 305],  # 42 cup
        [43, 331, 336],  # 43 fork
        [44, 377, 383],  # 44 knife
        [45, 484, 488],  # 45 spoon
        [46, 253, 257],  # 46 bowl
        [47, 215, 224],  # 47 banana
        [48, 199, 208],  # 48 apple
        [49, 439, 445],  # 49 sandwich
        [50, 398, 407],  # 50 orange
        [51, 258, 264],  # 51 broccoli
        [52, 274, 283],  # 52 carrot
        [53, 357, 363],  # 53 hot_dog
        [54, 419, 429],  # 54 pizza
        [55, 306, 313],  # 55 donut
        [56, 265, 273],  # 56 cake
        [57, 87, 92],    # 57 chair
        [58, 93, 96],    # 58 couch
        [59, 171, 174],  # 59 potted_plant
        [60, 240, 243],  # 60 bed
        [61, 108, 111],  # 61 dining_table
        [62, 551, 558],  # 62 toilet
        [63, 195, 198],  # 63 TV
        [64, 384, 389],  # 64 laptop
        [65, 394, 397],  # 65 mouse
        [66, 435, 438],  # 66 remote
        [67, 364, 368],  # 67 keyboard
        [68, 284, 290],  # 68 cell_phone
        [69, 390, 393],  # 69 microwave
        [70, 408, 414],  # 70 oven
        [71, 547, 550],  # 71 toaster
        [72, 450, 453],  # 72 sink
        [73, 430, 434],  # 73 refrigerator
        [74, 248, 252],  # 74 book
        [75, 291, 295],  # 75 clock
        [76, 585, 588],  # 76 vase
        [77, 446, 449],  # 77 scissors
        [78, 529, 533],  # 78 teddy_bear
        [79, 349, 352],  # 79 hair_drier
        [80, 559, 562],  # 80 toothbrush
    ]
    # Close the pickle file deterministically (the original leaked the
    # handle). Python 3 needs latin1 to read Python 2 pickles.
    with open(output_file, "rb") as fh:
        if version_info.major == 3:
            HICO = pickle.load(fh, encoding='latin1')
        else:
            HICO = pickle.load(fh)
    print('Load HICO sucessfully', datetime.datetime.now())
    for p in params:
        save_HICO(HICO, HICO_dir, p[0], p[1], p[2], fuse_type)
    print("Finish save HICO", datetime.datetime.now())
| 33.162304
| 102
| 0.511762
| 1,744
| 12,668
| 3.623853
| 0.287271
| 0.03038
| 0.012184
| 0.012658
| 0.804747
| 0.787975
| 0.782278
| 0.730696
| 0.730696
| 0.717722
| 0
| 0.192285
| 0.334938
| 12,668
| 381
| 103
| 33.249344
| 0.557864
| 0.279918
| 0
| 0.80427
| 0
| 0
| 0.026011
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017794
| false
| 0
| 0.035587
| 0
| 0.064057
| 0.014235
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
85bbee7a72432baab70dcf67b4519676dc5248b6
| 9,997
|
py
|
Python
|
tests/query/v2/match/test_variable_length_relationships.py
|
tom-chensf/nebula-graph
|
0f2f0d02879bfd2421815a26158e8fa030f19b62
|
[
"Apache-2.0"
] | null | null | null |
tests/query/v2/match/test_variable_length_relationships.py
|
tom-chensf/nebula-graph
|
0f2f0d02879bfd2421815a26158e8fa030f19b62
|
[
"Apache-2.0"
] | null | null | null |
tests/query/v2/match/test_variable_length_relationships.py
|
tom-chensf/nebula-graph
|
0f2f0d02879bfd2421815a26158e8fa030f19b62
|
[
"Apache-2.0"
] | null | null | null |
# --coding:utf-8--
#
# Copyright (c) 2020 vesoft inc. All rights reserved.
#
# This source code is licensed under Apache 2.0 License,
# attached with Common Clause Condition 1.0, found in the LICENSES directory.
import pytest
from tests.common.nebula_test_suite import NebulaTestSuite
@pytest.mark.usefixtures('set_vertices_and_edges')
class TestVariableLengthRelationshipMatch(NebulaTestSuite):
    """MATCH queries with variable-length relationship patterns ([e*m..n])
    over the NBA fixture space."""

    @classmethod
    def prepare(cls):
        cls.use_nba()

    @pytest.mark.skip
    def test_to_be_deleted(self):
        """Variable-step patterns combined with a tag filter must fail."""
        # variable steps
        stmt = 'MATCH (v:player:{name: "abc"}) -[r*1..3]-> () return *'
        self.fail_query(stmt)
        stmt = 'MATCH (v:player:{name: "abc"}) -[r*..3]-> () return *'
        self.fail_query(stmt)
        stmt = 'MATCH (v:player:{name: "abc"}) -[r*1..]-> () return *'
        self.fail_query(stmt)

    @pytest.mark.skip
    def test_hops_0_to_1(self, like, serve):
        """0..1-hop expansions over single and multiple edge types,
        with and without edge-property filters, in both directions."""
        VERTICES, EDGES = self.VERTEXS, self.EDGS

        def like_row(dst: str):
            return [[like('Tracy McGrady', dst)], VERTICES[dst]]

        def serve_row(dst):
            return [[serve('Tracy McGrady', dst)], VERTICES[dst]]

        # single both direction edge with properties
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:serve*0..1{start_year: 2000}]-(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                [[], VERTICES["Tracy McGrady"]],
                serve_row("Magic")
            ]
        }
        self.check_rows_with_header(stmt, expected)
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:like*0..1{likeness: 90}]-(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                [[], VERTICES["Tracy McGrady"]],
                like_row("Kobe Bryant"),
                like_row("Grant Hill"),
                like_row("Rudy Gay"),
                like_row("Vince Carter"),
                like_row("Yao Ming"),
                like_row("Grant Hill"), # like each other
            ]
        }
        self.check_rows_with_header(stmt, expected)
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:like*1{likeness: 90}]-(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                like_row("Kobe Bryant"),
                like_row("Grant Hill"),
                like_row("Rudy Gay"),
                like_row("Vince Carter"),
                like_row("Yao Ming"),
                like_row("Grant Hill"), # like each other
            ]
        }
        self.check_rows_with_header(stmt, expected)
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:like*0{likeness: 90}]-(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                [[], VERTICES["Tracy McGrady"]],
            ]
        }
        self.check_rows_with_header(stmt, expected)
        # single direction edge with properties
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:like*0..1{likeness: 90}]->(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                [[], VERTICES["Tracy McGrady"]],
                like_row("Kobe Bryant"),
                like_row("Grant Hill"),
                like_row("Rudy Gay"),
            ]
        }
        self.check_rows_with_header(stmt, expected)
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:like*0{likeness: 90}]->(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                [[], VERTICES["Tracy McGrady"]],
            ]
        }
        self.check_rows_with_header(stmt, expected)
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:like*1{likeness: 90}]->(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                like_row("Kobe Bryant"),
                like_row("Grant Hill"),
                like_row("Rudy Gay"),
            ]
        }
        self.check_rows_with_header(stmt, expected)
        # single both direction edge without properties
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:serve*0..1]-(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                [[], VERTICES["Tracy McGrady"]],
                serve_row("Raptors"),
                serve_row("Magic"),
                serve_row("Spurs"),
                serve_row("Rockets"),
            ]
        }
        self.check_rows_with_header(stmt, expected)
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:like*0..1]-(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                [[], VERTICES["Tracy McGrady"]],
                like_row("Kobe Bryant"),
                like_row("Grant Hill"),
                like_row("Rudy Gay"),
                like_row("Vince Carter"),
                like_row("Yao Ming"),
                like_row("Grant Hill"), # like each other
            ]
        }
        self.check_rows_with_header(stmt, expected)
        # multiple both direction edge with properties
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:serve|like*0..1{start_year: 2000}]-(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                [[], VERTICES["Tracy McGrady"]],
                serve_row("Magic"),
            ]
        }
        self.check_rows_with_header(stmt, expected)
        # multiple single direction edge with properties
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:serve|like*0..1{start_year: 2000}]->(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                [[], VERTICES["Tracy McGrady"]],
                serve_row("Magic"),
            ]
        }
        self.check_rows_with_header(stmt, expected)
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:serve|like*0..1{likeness: 90}]->(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                [[], VERTICES["Tracy McGrady"]],
                like_row("Kobe Bryant"),
                like_row("Grant Hill"),
                like_row("Rudy Gay"),
            ]
        }
        self.check_rows_with_header(stmt, expected)
        # multiple both direction edge with properties
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:serve|like*0..1]-(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                [[], VERTICES["Tracy McGrady"]],
                like_row("Kobe Bryant"),
                like_row("Grant Hill"),
                like_row("Rudy Gay"),
                like_row("Vince Carter"),
                like_row("Yao Ming"),
                like_row("Grant Hill"),
                serve_row("Raptors"),
                serve_row("Magic"),
                serve_row("Spurs"),
                serve_row("Rockets"),
            ]
        }
        self.check_rows_with_header(stmt, expected)
        # multiple single direction edge with properties
        stmt = '''
        MATCH (:player{name:"Tracy McGrady"})-[e:serve|like*0..1]->(v)
        RETURN e, v
        '''
        expected = {
            "column_names": ['e', 'v'],
            "rows": [
                [[], VERTICES["Tracy McGrady"]],
                like_row("Kobe Bryant"),
                like_row("Grant Hill"),
                like_row("Rudy Gay"),
                serve_row("Raptors"),
                serve_row("Magic"),
                serve_row("Spurs"),
                serve_row("Rockets"),
            ]
        }
        self.check_rows_with_header(stmt, expected)

    @pytest.mark.skip
    def test_mix_hops(self):
        """Fixed-hop steps mixed with a variable-length step."""
        stmt = '''
        MATCH (:player{name: "Tim Duncan"})-[e1:like]->()-[e2:serve*0..3]->()<-[e3:serve]-(v)
        RETURN e1, e2, e3, v
        '''
        expected = {
            # Fixed: the header must match the RETURN clause (the original
            # expected ['e', 'v'], which can never match e1, e2, e3, v).
            "column_names": ['e1', 'e2', 'e3', 'v'],
            "rows": []
        }
        self.check_rows_with_header(stmt, expected)

    def test_more_cases(self, like, serve, like_2hop):
        """Referencing an undefined alias in a variable-length pattern must
        raise a semantic error."""
        # stmt = '''
        # MATCH (v:player{name: 'Tim Duncan'})-[e:like*0]-()
        # RETURN e
        # '''
        # stmt = '''
        # MATCH (v:player{name: 'Tim Duncan'})-[e:like*0..0]-()
        # RETURN e
        # '''
        # stmt = '''
        # MATCH (v:player{name: 'Tim Duncan'})-[e:like*]-()
        # RETURN e
        # '''
        # stmt = '''
        # MATCH (v:player{name: 'Tim Duncan'})-[e:like*0..0]-()-[e2:like*0..0]-()
        # RETURN e, e2
        # '''
        stmt = '''
        MATCH p=(v:player{name: 'Tim Duncan'})-[:like|:serve*1..3]->(v1)
        WHERE e[0].likeness>90
        RETURN p
        '''
        resp = self.execute(stmt)
        self.check_resp_failed(resp)
        self.check_error_msg(resp, "SemanticError: Alias used but not defined: `e'")
        stmt = '''
        MATCH p=(v:player{name: 'Tim Duncan'})-[:like|:serve*1..3]->(v1)
        RETURN e
        '''
        resp = self.execute(stmt)
        self.check_resp_failed(resp)
        self.check_error_msg(resp, "SemanticError: Alias used but not defined: `e'")
        stmt = '''
        MATCH p=(v:player{name: 'Tim Duncan'})-[:like|:serve*1..3]->(v1)
        WHERE e[0].likeness+e[1].likeness>90
        RETURN p
        '''
        resp = self.execute(stmt)
        self.check_resp_failed(resp)
        self.check_error_msg(resp, "SemanticError: Alias used but not defined: `e'")
| 31.143302
| 93
| 0.468541
| 1,054
| 9,997
| 4.302657
| 0.132827
| 0.057111
| 0.049614
| 0.062845
| 0.860419
| 0.827122
| 0.827122
| 0.813671
| 0.806615
| 0.806615
| 0
| 0.015692
| 0.368911
| 9,997
| 320
| 94
| 31.240625
| 0.703123
| 0.090027
| 0
| 0.734615
| 0
| 0.065385
| 0.352643
| 0.075582
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026923
| false
| 0
| 0.007692
| 0.007692
| 0.046154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a472d9dbbef1548c74841070f5d2c899c69f8e82
| 14,181
|
py
|
Python
|
drf_tester/viewsets/staff.py
|
samuelmovi/drf-tester
|
5ad4cfadbec98f13f73b656c8d690c591d09c216
|
[
"MIT"
] | 1
|
2021-09-10T11:46:29.000Z
|
2021-09-10T11:46:29.000Z
|
drf_tester/viewsets/staff.py
|
samuelmovi/drf-tester
|
5ad4cfadbec98f13f73b656c8d690c591d09c216
|
[
"MIT"
] | null | null | null |
drf_tester/viewsets/staff.py
|
samuelmovi/drf-tester
|
5ad4cfadbec98f13f73b656c8d690c591d09c216
|
[
"MIT"
] | null | null | null |
"""
Collection of classes to be used in the testing of access to a Viewset by a STAFF user
"""
from rest_framework import status
from rest_framework.test import force_authenticate
from ..utils import BaseDrfTest
class NoList(BaseDrfTest):
    """Staff users must be denied the list action."""

    def test_staff_user_cannot_list_existing_instance(self):
        """Staff user cannot list existing instances"""
        # get user
        user = self.get_active_staff(self.user_data)
        # Populate the database; the call is kept for its side effect only,
        # the created instances themselves are not inspected.
        self.get_model_instances()
        # Query endpoint
        request = self.requests.get(self.endpoint, data={})
        force_authenticate(request, user=user)
        response = self.view(request)
        # Assert forbidden access
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
class NoListOwned(BaseDrfTest):
    """Staff users must not be able to list instances they own."""

    def test_staff_user_cannot_list_owned_instance(self):
        """Staff user cannot list owned instances"""
        staff = self.get_active_staff(self.user_data)
        # Persist instances owned by the staff user
        for obj in self.get_model_instances():
            setattr(obj, self.USER_FIELD_NAME, staff)
            obj.save()
        # Hit the list endpoint authenticated as the staff user
        req = self.requests.get(self.endpoint, data={})
        force_authenticate(req, user=staff)
        resp = self.view(req)
        # Access must be rejected
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
class NoRetrieve(BaseDrfTest):
    """Staff users must not retrieve details of existing instances."""

    def test_staff_user_cannot_get_existing_instance(self):
        """Staff user cannot get details on existing instance"""
        staff = self.get_active_staff(self.user_data)
        # Persist a single target instance
        target = self.factory()
        # Request its detail view as the staff user
        req = self.requests.get(self.endpoint, data={})
        force_authenticate(req, user=staff)
        resp = self.view(req, pk=target.id)
        # Access must be rejected
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
class NoRetrieveOwned(BaseDrfTest):
    """Staff users must not retrieve details of instances they own."""

    def test_staff_user_cannot_get_owned_instance(self):
        """Staff user cannot get details on own instance"""
        staff = self.get_active_staff(self.user_data)
        # Persist a target instance owned by the staff user
        target = self.factory()
        setattr(target, self.USER_FIELD_NAME, staff)
        target.save()
        # Request its detail view as the staff user
        req = self.requests.get(self.endpoint, data={})
        force_authenticate(req, user=staff)
        resp = self.view(req, pk=target.id)
        # Access must be rejected
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
class NoCreate(BaseDrfTest):
    """Staff users must not be able to create instances."""

    def test_staff_user_cannot_create_instance(self):
        """Staff user cannot create new instance"""
        staff = self.get_active_staff(self.user_data)
        # POST to the endpoint authenticated as the staff user
        req = self.requests.post(self.endpoint, data={})
        force_authenticate(req, user=staff)
        resp = self.view(req)
        # Access must be rejected
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
class NoUpdate(BaseDrfTest):
    """Staff users must not be able to modify existing instances."""

    def test_staff_user_cannot_modify_existing_instance(self):
        """Staff user cannot modify existing instance"""
        staff = self.get_active_staff(self.user_data)
        # Persist a single target instance
        target = self.factory()
        # PUT to its detail view as the staff user
        req = self.requests.put(self.endpoint, data={})
        force_authenticate(req, user=staff)
        resp = self.view(req, pk=target.id)
        # Access must be rejected
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
class NoUpdateOwned(BaseDrfTest):
    """Staff users must not be able to modify instances they own."""

    def test_staff_user_cannot_modify_owned_instance(self):
        """Staff user cannot modify owned instance"""
        staff = self.get_active_staff(self.user_data)
        # Persist a target instance owned by the staff user
        target = self.factory()
        setattr(target, self.USER_FIELD_NAME, staff)
        target.save()
        # PUT to its detail view as the staff user
        req = self.requests.put(self.endpoint, data={})
        force_authenticate(req, user=staff)
        resp = self.view(req, pk=target.id)
        # Access must be rejected
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
class NoDestroy(BaseDrfTest):
    def test_staff_user_cannot_delete_existing_instance(self):
        """A staff user may not delete an existing instance."""
        staff = self.get_active_staff(self.user_data)
        target = self.factory()
        # DELETE against the detail route as the staff user.
        req = self.requests.delete(self.endpoint)
        force_authenticate(req, user=staff)
        resp = self.view(req, pk=target.id)
        # The delete must be rejected...
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
        # ...and the record must survive it.
        self.assertTrue(self.model.objects.filter(id=target.pk).exists())
class NoDestroyOwned(BaseDrfTest):
    def test_staff_user_cannot_delete_owned_instance(self):
        """Owning an instance does not let a staff user delete it."""
        staff = self.get_active_staff(self.user_data)
        # Build an instance and mark the staff user as its owner.
        owned = self.factory()
        setattr(owned, self.USER_FIELD_NAME, staff)
        owned.save()
        # DELETE against the detail route as the owner.
        req = self.requests.delete(self.endpoint)
        force_authenticate(req, user=staff)
        resp = self.view(req, pk=owned.id)
        # The delete must be rejected...
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
        # ...and the record must survive it.
        self.assertTrue(self.model.objects.filter(id=owned.pk).exists())
class CanList(BaseDrfTest):
    def test_staff_user_can_list_instances(self):
        """A staff user can list every instance."""
        staff = self.get_active_staff(self.user_data)
        # Populate the table with a known batch of instances.
        created = self.get_model_instances()
        req = self.requests.get(self.endpoint)
        force_authenticate(req, user=staff)
        resp = self.view(req)
        # Listing succeeds...
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        # ...and returns every created instance.
        self.assertEqual(len(created), len(resp.data))
class CanListOwned(BaseDrfTest):
    def test_staff_user_can_list_owned_instances(self):
        """A staff user can list the instances they own."""
        staff = self.get_active_staff(self.user_data)
        # Create a batch of instances, all owned by the staff user.
        created = self.get_model_instances()
        for owned in created:
            setattr(owned, self.USER_FIELD_NAME, staff)
            owned.save()
        req = self.requests.get(self.endpoint)
        force_authenticate(req, user=staff)
        resp = self.view(req)
        # Listing succeeds...
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        # ...and returns every owned instance.
        self.assertEqual(len(created), len(resp.data))
class CanRetrieve(BaseDrfTest):
    def test_staff_user_can_get_instance(self):
        """A staff user can retrieve an existing instance."""
        staff = self.get_active_staff(self.user_data)
        target = self.factory()
        # GET the detail route for the instance.
        req = self.requests.get(self.endpoint)
        force_authenticate(req, user=staff)
        resp = self.view(req, pk=target.id)
        # Retrieval succeeds.
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
class CanRetrieveOwned(BaseDrfTest):
    def test_staff_user_can_get_owned_instance(self):
        """A staff user can retrieve an instance they own."""
        staff = self.get_active_staff(self.user_data)
        # Build an instance and mark the staff user as its owner.
        owned = self.factory()
        setattr(owned, self.USER_FIELD_NAME, staff)
        owned.save()
        # GET the detail route for the owned instance.
        req = self.requests.get(self.endpoint)
        force_authenticate(req, user=staff)
        resp = self.view(req, pk=owned.id)
        # Retrieval succeeds.
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
class CanCreate(BaseDrfTest):
    def test_staff_user_can_create_instance(self):
        """A staff user can create a new instance."""
        staff = self.get_active_staff(self.user_data)
        # POST the prepared payload as the staff user.
        req = self.requests.post(self.endpoint, data=self.instance_data)
        force_authenticate(req, user=staff)
        resp = self.view(req)
        # Creation succeeds...
        self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
        # ...the row is persisted and echoes the submitted payload.
        self.assertTrue(self.model.objects.filter(id=resp.data["id"]).exists())
        self.check_equal_data(self.instance_data, resp.data)
class CanCreateOwned(BaseDrfTest):
    def test_staff_user_can_create_owned_instance(self):
        """A staff user can create an instance owned by themselves."""
        staff = self.get_active_staff(self.user_data)
        # Point the payload's owner field at the staff user.
        self.instance_data[self.USER_FIELD_NAME] = staff.id
        req = self.requests.post(self.endpoint, data=self.instance_data)
        force_authenticate(req, user=staff)
        resp = self.view(req)
        # Creation succeeds...
        self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
        # ...the row is persisted and echoes the submitted payload.
        self.assertTrue(self.model.objects.filter(id=resp.data["id"]).exists())
        self.check_equal_data(self.instance_data, resp.data)
class CanUpdate(BaseDrfTest):
    def test_staff_user_can_modify_instance(self):
        """A staff user can modify an existing instance."""
        staff = self.get_active_staff(self.user_data)
        target = self.factory()
        # PUT the prepared payload against the detail route.
        req = self.requests.put(self.endpoint, self.instance_data)
        force_authenticate(req, user=staff)
        resp = self.view(req, pk=target.id)
        # Update succeeds and the response reflects the payload.
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.check_equal_data(self.instance_data, resp.data)
class CanUpdateOwned(BaseDrfTest):
    def test_staff_user_can_modify_owned_instance(self):
        """A staff user can modify an instance they own."""
        staff = self.get_active_staff(self.user_data)
        # Build an instance and mark the staff user as its owner.
        owned = self.factory()
        setattr(owned, self.USER_FIELD_NAME, staff)
        owned.save()
        # PUT the prepared payload against the detail route.
        req = self.requests.put(self.endpoint, self.instance_data)
        force_authenticate(req, user=staff)
        resp = self.view(req, pk=owned.id)
        # Update succeeds and the response reflects the payload.
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.check_equal_data(self.instance_data, resp.data)
class CanDestroy(BaseDrfTest):
    def test_staff_user_can_delete_instance(self):
        """A staff user can delete an existing instance."""
        staff = self.get_active_staff(self.user_data)
        target = self.factory()
        # DELETE the detail route as the staff user.
        req = self.requests.delete(self.endpoint)
        force_authenticate(req, user=staff)
        resp = self.view(req, pk=target.id)
        # Delete succeeds with no content...
        self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
        # ...and the row is gone from the database.
        self.assertFalse(self.model.objects.filter(id=target.pk).exists())
class CanDestroyOwned(BaseDrfTest):
    def test_staff_user_can_delete_owned_instance(self):
        """A staff user can delete an instance they own."""
        staff = self.get_active_staff(self.user_data)
        # Build an instance and mark the staff user as its owner.
        owned = self.factory()
        setattr(owned, self.USER_FIELD_NAME, staff)
        owned.save()
        # DELETE the detail route as the owner.
        req = self.requests.delete(self.endpoint)
        force_authenticate(req, user=staff)
        resp = self.view(req, pk=owned.id)
        # Delete succeeds with no content...
        self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
        # ...and the row is gone from the database.
        self.assertFalse(self.model.objects.filter(id=owned.pk).exists())
class CanPaginate(BaseDrfTest):
    def test_staff_user_can_paginate_instances(self):
        """Staff user can paginate instances via limit/offset query params."""
        limit = 5
        offset = 10
        # get user
        user = self.get_active_staff(self.user_data)
        # create instances so there is something to paginate over
        self.get_model_instances()
        # Request a single page of results
        url = f"{self.endpoint}?limit={limit}&offset={offset}"
        request = self.requests.get(url)
        force_authenticate(request, user=user)
        response = self.view(request)
        # Assert access is allowed
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Assert the page honours the requested limit.  The previous
        # assertion hard-coded 5 (and its comment claimed "2 instances"),
        # which would silently drift if `limit` were ever changed.
        payload = response.json()
        self.assertLessEqual(len(payload["results"]), limit)
# EXTENDED CLASSES
class StaffFullAccess(CanList, CanRetrieve, CanCreate, CanUpdate, CanDestroy):
    """
    Staff user has full access to the endpoint: list, retrieve, create,
    update and destroy are all permitted.
    """
    pass
class StaffNoAccess(NoList, NoRetrieve, NoCreate, NoUpdate, NoDestroy):
    """
    Staff user has no access to the endpoint: every operation is forbidden.
    """
    pass
class StaffReadOnly(CanList, CanRetrieve, NoCreate, NoUpdate, NoDestroy):
    """
    Staff user has read-only access to the endpoint: list and retrieve are
    permitted, while create, update and destroy are forbidden.
    """
    pass
class StaffOwner(CanListOwned, CanRetrieveOwned, CanCreateOwned, CanUpdateOwned, CanDestroyOwned):
    """
    Staff user can access instances owned by the user: all operations are
    permitted on owned instances.
    """
    pass
| 36.929688
| 98
| 0.67527
| 1,682
| 14,181
| 5.51308
| 0.083829
| 0.043675
| 0.029764
| 0.049606
| 0.906503
| 0.886337
| 0.823358
| 0.7469
| 0.738488
| 0.738488
| 0
| 0.00655
| 0.235667
| 14,181
| 383
| 99
| 37.02611
| 0.848971
| 0.18828
| 0
| 0.730392
| 0
| 0
| 0.004976
| 0.003998
| 0
| 0
| 0
| 0
| 0.142157
| 1
| 0.098039
| false
| 0.019608
| 0.014706
| 0
| 0.230392
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a494b54b716174578140bc992363b676003a99ac
| 116
|
py
|
Python
|
ocr/data_helper/__init__.py
|
aksharsramesh/optical-character-recognition
|
ccd9b9eb17aeab8d67be2fc842228e3280f3ff2b
|
[
"MIT"
] | null | null | null |
ocr/data_helper/__init__.py
|
aksharsramesh/optical-character-recognition
|
ccd9b9eb17aeab8d67be2fc842228e3280f3ff2b
|
[
"MIT"
] | null | null | null |
ocr/data_helper/__init__.py
|
aksharsramesh/optical-character-recognition
|
ccd9b9eb17aeab8d67be2fc842228e3280f3ff2b
|
[
"MIT"
] | null | null | null |
# import the necessary packages
from .data_helper import load_mnist_dataset
from .data_helper import load_az_dataset
| 38.666667
| 43
| 0.87069
| 18
| 116
| 5.277778
| 0.611111
| 0.168421
| 0.294737
| 0.421053
| 0.505263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 116
| 3
| 44
| 38.666667
| 0.913462
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a49d295aabcb67672071aae5309dcbad5b403a40
| 4,718
|
py
|
Python
|
data/config.py
|
isLouisHsu/Pytorch_Retinaface
|
2db5e9aaec42d9605494032a5fa70fb7b82831de
|
[
"MIT"
] | null | null | null |
data/config.py
|
isLouisHsu/Pytorch_Retinaface
|
2db5e9aaec42d9605494032a5fa70fb7b82831de
|
[
"MIT"
] | null | null | null |
data/config.py
|
isLouisHsu/Pytorch_Retinaface
|
2db5e9aaec42d9605494032a5fa70fb7b82831de
|
[
"MIT"
] | null | null | null |
# config.py
#
# Backbone/training configurations for RetinaFace variants.  All configs
# share the same anchor/loss/training defaults; only backbone-specific
# fields (pretrain path, feature layers, channel widths, batch size, ...)
# differ.  The shared part is factored into _base_config(), which returns
# a *fresh* dict on every call so no list/dict objects are aliased between
# configs.  Note: dict key *order* may differ from the original literals;
# key sets and values are identical.


def _base_config():
    """Return a fresh copy of the settings shared by every backbone."""
    return {
        'in_channels': 3,
        'min_sizes': [[16, 32], [64, 128], [256, 512]],
        'steps': [8, 16, 32],
        'variance': [0.1, 0.2],
        'clip': False,
        'loc_weight': 2.0,
        'gpu_train': True,
        'ngpu': 1,
        'epoch': 100,
        'decay1': 70,
        'decay2': 90,
        'image_size': 480,
    }


# MobileNet 0.25 backbone: longer schedule and smaller FPN channels.
cfg_mnet = {
    **_base_config(),
    'name': 'mobilenet0.25',
    'batch_size': 64,
    'epoch': 200,
    'decay1': 190,
    'decay2': 220,
    'pretrain': "./weights/pretrained/mobilenetV1X0.25_pretrain.tar",
    'return_layers': {'stage1': 1, 'stage2': 2, 'stage3': 3},
    'in_channel': 32,
    'out_channel': 64
}

# ResNet-50 backbone: larger input, small batch.
cfg_re50 = {
    **_base_config(),
    'name': 'Resnet50',
    'batch_size': 8,
    'image_size': 840,
    'pretrain': "/home/louishsu/.cache/torch/hub/checkpoints/resnet50-19c8e357.pth",
    'return_layers': {'layer2': 1, 'layer3': 2, 'layer4': 3},
    'in_channel': 256,
    'out_channel': 256
}

# --------------------------------------------------------------------------------------
cfg_re18 = {
    **_base_config(),
    'name': 'Resnet18',
    'batch_size': 48,
    'pretrain': "/home/louishsu/.cache/torch/hub/checkpoints/resnet18-5c106cde.pth",
    'return_layers': {'layer2': 1, 'layer3': 2, 'layer4': 3},
    'in_channel': 64,
    'out_channel': 256
}

cfg_re34 = {
    **_base_config(),
    'name': 'Resnet34',
    'batch_size': 48,
    'pretrain': "/home/louishsu/.cache/torch/hub/checkpoints/resnet34-333f7ec4.pth",
    'return_layers': {'layer2': 1, 'layer3': 2, 'layer4': 3},
    'in_channel': 64,
    'out_channel': 256
}

# EfficientNet backbones: 'in_channel' is None (FPN widths are derived
# elsewhere -- TODO confirm against the network-construction code).
cfg_eff_b0 = {
    **_base_config(),
    'name': 'Efficientnet-b0',
    'batch_size': 12,
    'pretrain': "/home/louishsu/.cache/torch/hub/checkpoints/tf_efficientnet_b0_ns-c0e6a31c.pth",
    'return_layers': {'2': 3, '4': 5, '6': 7},
    'in_channel': None,
    'out_channel': 256
}

cfg_eff_b4 = {
    **_base_config(),
    'name': 'Efficientnet-b4',
    'batch_size': 12,
    'pretrain': "/home/louishsu/.cache/torch/hub/checkpoints/tf_efficientnet_b4_ns-d6313a46.pth",
    'return_layers': {'2': 3, '4': 5, '6': 7},
    'in_channel': None,
    'out_channel': 256
}

# --------------------------------------------------------------------------------------
# HSFD variants: 5-channel input with an explicit channel-selection list,
# smaller image size, and an optional finetune checkpoint.
cfg_re34_hsfd_finetune = {
    **_base_config(),
    'name': 'Resnet34',
    'used_channels': [12, 19, 13, 6, 3],
    'in_channels': 5,
    'batch_size': 32,
    'epoch': 20,
    'image_size': 320,
    'pretrain': "/home/louishsu/.cache/torch/hub/checkpoints/resnet34-333f7ec4.pth",
    'finetune': "outputs/resnet34_v1/Resnet34_iter_21000_2.5562_.pth",
    'return_layers': {'layer2': 1, 'layer3': 2, 'layer4': 3},
    'in_channel': 64,
    'out_channel': 256
}

cfg_re34_hsfd_not_finetune = {
    **_base_config(),
    'name': 'Resnet34',
    'used_channels': [12, 19, 13, 6, 3],
    'in_channels': 5,
    'batch_size': 32,
    'image_size': 320,
    'pretrain': "/home/louishsu/.cache/torch/hub/checkpoints/resnet34-333f7ec4.pth",
    'finetune': None,
    'return_layers': {'layer2': 1, 'layer3': 2, 'layer4': 3},
    'in_channel': 64,
    'out_channel': 256
}
| 26.655367
| 97
| 0.523527
| 609
| 4,718
| 3.889984
| 0.172414
| 0.027016
| 0.03377
| 0.040523
| 0.832419
| 0.829886
| 0.824821
| 0.806247
| 0.806247
| 0.806247
| 0
| 0.144102
| 0.221916
| 4,718
| 177
| 98
| 26.655367
| 0.501226
| 0.038788
| 0
| 0.762195
| 0
| 0
| 0.433583
| 0.12842
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f10e46f519a19547d255f39dd520b21ce38ebe18
| 1,867
|
py
|
Python
|
checkFolderIntegrity.py
|
armstjc/Retrosheet_DB
|
d4eea9dd244f92a8b9e7680a59f174af67498b6c
|
[
"MIT"
] | 2
|
2021-12-17T04:08:13.000Z
|
2021-12-19T14:05:20.000Z
|
checkFolderIntegrity.py
|
armstjc/Retrosheet_DB
|
d4eea9dd244f92a8b9e7680a59f174af67498b6c
|
[
"MIT"
] | null | null | null |
checkFolderIntegrity.py
|
armstjc/Retrosheet_DB
|
d4eea9dd244f92a8b9e7680a59f174af67498b6c
|
[
"MIT"
] | null | null | null |
import os
def checkFolderCreation():
    """Ensure the raw_data/data directory tree exists.

    Creates every folder the downstream scripts write into, relative to the
    current working directory.  Creation is best-effort: directories that
    already exist (or cannot be created) are skipped silently, matching the
    original try/except-pass behaviour.  The original 21 copy-pasted
    try/except blocks (two of them duplicates) are collapsed into one loop.
    """
    # Leaf directories only: os.makedirs creates missing parents, so the
    # intermediate folders (raw_data, raw_data/retrosheet, ...) need not
    # be listed separately.
    folders = [
        'raw_data/zip',
        'raw_data/retrosheet/team_gamelog',
        'raw_data/retrosheet/play_by_play',
        'raw_data/retrosheet/ejections',
        'raw_data/retrosheet/transactions',
        'raw_data/retrosheet/schedules',
        'raw_data/retrosplit/player_gamelog',
        'raw_data/retrosplit/team_gamelog',
        'raw_data/retrosplit/batting_by_position',
        'raw_data/retrosplit/batting_by_runners',
        'raw_data/retrosplit/batting_platoon',
        'raw_data/retrosplit/batting_head_to_head',
        'raw_data/retrosplit/pitching_by_runners',
        'raw_data/retrosplit/pitching_platoon',
        'data',
    ]
    for folder in folders:
        try:
            # exist_ok=True makes re-runs a no-op instead of an error.
            os.makedirs(folder, exist_ok=True)
        except OSError:
            # Best-effort, as before: ignore folders we cannot create.
            pass
def main():
    """Script entry point: build the expected directory layout."""
    checkFolderCreation()


if __name__ == "__main__":
    main()
| 16.972727
| 60
| 0.546867
| 202
| 1,867
| 4.826733
| 0.153465
| 0.102564
| 0.205128
| 0.253333
| 0.855385
| 0.817436
| 0.817436
| 0.789744
| 0.544615
| 0.422564
| 0
| 0
| 0.352437
| 1,867
| 110
| 61
| 16.972727
| 0.806452
| 0
| 0
| 0.744186
| 0
| 0
| 0.312634
| 0.275161
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| true
| 0.232558
| 0.011628
| 0
| 0.034884
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
f11f1c43cde469c2d94f69913987d2fd8aa363d2
| 5,654
|
py
|
Python
|
romantic-revolutionaries/test/test_viewcontrol.py
|
LaFeeVert/code-jam-6
|
f65b284c58ae653f9212923c6d3296cf252b6453
|
[
"MIT"
] | null | null | null |
romantic-revolutionaries/test/test_viewcontrol.py
|
LaFeeVert/code-jam-6
|
f65b284c58ae653f9212923c6d3296cf252b6453
|
[
"MIT"
] | 2
|
2020-01-21T19:46:57.000Z
|
2020-01-21T20:33:05.000Z
|
romantic-revolutionaries/test/test_viewcontrol.py
|
LaFeeVert/code-jam-6
|
f65b284c58ae653f9212923c6d3296cf252b6453
|
[
"MIT"
] | 2
|
2020-01-20T23:46:14.000Z
|
2020-01-21T05:50:15.000Z
|
"""Unitest the view control.
Run these tests with either with "pytest" or python -m "unittest"
from within the same directory.
"""
import unittest
from modules.view.viewcontrol import ViewControl
from modules.navigation.navcont import NavControl, Directions
from modules.map.MapControl import DungeonMap
class Observer:
    """Minimal subscriber that records the last description pushed to it."""

    def __init__(self):
        self.descriptive_text = ""

    def callback(self, descriptive_text):
        """Remember the most recent description for later inspection."""
        self.descriptive_text = descriptive_text
class TestViewControl(unittest.TestCase):
    """Exercise ViewControl's room descriptions via the nav->map->view chain."""
    def setUp(self):
        # setup the initial test map
        # NOTE(review): in the descriptions below, 1 appears to render as
        # "wall" and 0 as "dark passage"; 6 and 9 are rendered literally
        # ("a 6", "there is 9") -- confirm against DungeonMap's legend.
        DungeonMap.map_vector = [
            [1, 0, 0, 0],
            [0, 1, 6, 0],
            [0, 9, 1, 0],
            [0, 0, 0, 0]]
        self.ob = Observer()
        self.mc = DungeonMap()
        self.nc = NavControl()
        self.vc = ViewControl()
        # Wire the observer chain: nav -> map -> view -> test observer,
        # so every go()/look() ends up as text in self.ob.descriptive_text.
        self.nc.subscribe(self.mc.callback)
        self.mc.subscribe(self.vc.callback)
        self.vc.subscribe(self.ob.callback)
    def test_look(self):
        """Move around the map and check the description in each direction."""
        # Move north (into a wall), then look all four ways.
        self.nc.go(Directions.NORTH)
        self.vc.look(Directions.NORTH)
        expected = """You have run into a wall.
There is a wall in front of you.
It is bordered to the left with a dark passage.
It is bordered to the right with a wall.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        self.vc.look(Directions.SOUTH)
        expected = """You have run into a wall.
There is a dark passage ahead of you.
It is bordered to the left with a dark passage.
It is bordered to the right with a wall.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        self.vc.look(Directions.EAST)
        expected = """You have run into a wall.
There is a dark passage ahead of you.
It is bordered to the left with a wall.
It is bordered to the right with a dark passage.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        self.vc.look(Directions.WEST)
        expected = """You have run into a wall.
There is a wall in front of you.
It is bordered to the left with a wall.
It is bordered to the right with a dark passage.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        # Move south, then look all four ways from the new position.
        self.nc.go(Directions.SOUTH)
        self.vc.look(Directions.NORTH)
        expected = """There is a dark passage ahead of you.
It is bordered to the left with a wall.
It is bordered to the right with a 6.
At your feet there is 9.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        # NOTE(review): the next expected/assert pair repeats the previous
        # one without an intervening look() call -- presumably a copy-paste
        # leftover; it re-checks the same state, so behaviour is unchanged.
        expected = """There is a dark passage ahead of you.
It is bordered to the left with a wall.
It is bordered to the right with a 6.
At your feet there is 9.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        self.vc.look(Directions.SOUTH)
        expected = """There is a wall in front of you.
It is bordered to the left with a wall.
It is bordered to the right with a wall.
At your feet there is 9.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        self.vc.look(Directions.EAST)
        expected = """There is a dark passage ahead of you.
It is bordered to the left with a 6.
It is bordered to the right with a wall.
At your feet there is 9.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        self.vc.look(Directions.WEST)
        expected = """There is a wall in front of you.
It is bordered to the left with a wall.
It is bordered to the right with a wall.
At your feet there is 9.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        # Move east, then look all four ways.
        self.nc.go(Directions.EAST)
        self.vc.look(Directions.NORTH)
        expected = """There is a dark passage ahead of you.
It is bordered to the left with a dark passage.
It is bordered to the right with a wall.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        self.vc.look(Directions.SOUTH)
        expected = """There is a wall in front of you.
It is bordered to the left with a wall.
It is bordered to the right with a wall.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        self.vc.look(Directions.EAST)
        expected = """There is a wall in front of you.
It is bordered to the left with a wall.
It is bordered to the right with a wall.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        self.vc.look(Directions.WEST)
        expected = """There is a dark passage ahead of you.
It is bordered to the left with a wall.
It is bordered to the right with a dark passage.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        # Move back west, then look all four ways.
        self.nc.go(Directions.WEST)
        self.vc.look(Directions.NORTH)
        expected = """There is a dark passage ahead of you.
It is bordered to the left with a wall.
It is bordered to the right with a 6.
At your feet there is 9.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        self.vc.look(Directions.SOUTH)
        expected = """There is a wall in front of you.
It is bordered to the left with a wall.
It is bordered to the right with a wall.
At your feet there is 9.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        self.vc.look(Directions.EAST)
        expected = """There is a dark passage ahead of you.
It is bordered to the left with a 6.
It is bordered to the right with a wall.
At your feet there is 9.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
        self.vc.look(Directions.WEST)
        expected = """There is a wall in front of you.
It is bordered to the left with a wall.
It is bordered to the right with a wall.
At your feet there is 9.
"""
        self.assertEqual(self.ob.descriptive_text, expected)
| 32.494253
| 65
| 0.671737
| 890
| 5,654
| 4.235955
| 0.091011
| 0.046419
| 0.108223
| 0.12626
| 0.820955
| 0.818302
| 0.811671
| 0.811671
| 0.811671
| 0.811671
| 0
| 0.006988
| 0.240715
| 5,654
| 173
| 66
| 32.682081
| 0.871186
| 0.032013
| 0
| 0.802817
| 0
| 0
| 0.429094
| 0
| 0
| 0
| 0
| 0
| 0.119718
| 1
| 0.028169
| false
| 0.105634
| 0.028169
| 0
| 0.070423
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
f12aabd1f6418eee7af037f399cf036bc5f2db08
| 165
|
py
|
Python
|
google_images_download/__init__.py
|
Aerobautics/google-images-download
|
52ed15b3aae9abdb68b0b8567115ac0a42d0a93a
|
[
"MIT"
] | null | null | null |
google_images_download/__init__.py
|
Aerobautics/google-images-download
|
52ed15b3aae9abdb68b0b8567115ac0a42d0a93a
|
[
"MIT"
] | null | null | null |
google_images_download/__init__.py
|
Aerobautics/google-images-download
|
52ed15b3aae9abdb68b0b8567115ac0a42d0a93a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from __future__ import absolute_import
def main():
    """Import the package's implementation module.

    The import is done lazily inside main() so that running this file as a
    script pulls in google_images_download.google_images_download without
    importing it at package-import time.
    """
    # NOTE(review): the imported module is never used here; presumably only
    # its module-level side effects matter -- confirm against that module.
    import google_images_download.google_images_download
if __name__ == '__main__':
    main()
| 18.333333
| 53
| 0.781818
| 22
| 165
| 5.090909
| 0.681818
| 0.214286
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 165
| 8
| 54
| 20.625
| 0.772414
| 0.121212
| 0
| 0
| 0
| 0
| 0.055556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2d20150f4ff1fc76d598cb2fa324cba41a39aa2a
| 266
|
py
|
Python
|
torchvision_3d/models/__init__.py
|
rubythalib33/3D-Torchvision
|
7dab0a3d1d83e6046320f879af2bff28b31310ab
|
[
"MIT"
] | 4
|
2022-03-09T02:53:12.000Z
|
2022-03-10T14:35:06.000Z
|
torchvision_3d/models/__init__.py
|
rubythalib33/3D-Torchvision
|
7dab0a3d1d83e6046320f879af2bff28b31310ab
|
[
"MIT"
] | null | null | null |
torchvision_3d/models/__init__.py
|
rubythalib33/3D-Torchvision
|
7dab0a3d1d83e6046320f879af2bff28b31310ab
|
[
"MIT"
] | 1
|
2022-03-10T14:35:08.000Z
|
2022-03-10T14:35:08.000Z
|
from torchvision_3d.models.alexnet import *
from torchvision_3d.models.vgg import *
from torchvision_3d.models.resnet import *
from torchvision_3d.models.densenet import *
from torchvision_3d.models.squeezenet import *
from torchvision_3d.models.mobilenetv2 import *
| 44.333333
| 47
| 0.845865
| 36
| 266
| 6.083333
| 0.305556
| 0.410959
| 0.465753
| 0.630137
| 0.6621
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028807
| 0.086466
| 266
| 6
| 47
| 44.333333
| 0.872428
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2d205b4bdbb6b2e7797ab66ee1b73933fe7ac633
| 248
|
py
|
Python
|
src/behavior_tree_learning/gp.py
|
dgerod/behavior_tree_learning
|
71da80c91ecd48fd5da377f83604b62112ba9629
|
[
"Apache-2.0"
] | 7
|
2022-02-09T12:51:51.000Z
|
2022-03-19T14:40:16.000Z
|
src/behavior_tree_learning/gp.py
|
dgerod/behavior_tree_learning
|
71da80c91ecd48fd5da377f83604b62112ba9629
|
[
"Apache-2.0"
] | 2
|
2022-02-03T10:54:41.000Z
|
2022-02-15T10:32:03.000Z
|
src/behavior_tree_learning/gp.py
|
dgerod/behavior_tree_learning
|
71da80c91ecd48fd5da377f83604b62112ba9629
|
[
"Apache-2.0"
] | null | null | null |
from behavior_tree_learning.core.gp import GeneticEnvironment, GeneticOperators
from behavior_tree_learning.core.gp import GeneticParameters, GeneticSelectionMethods, TraceConfiguration
from behavior_tree_learning.core.gp import GeneticProgramming
| 62
| 105
| 0.903226
| 27
| 248
| 8.074074
| 0.481481
| 0.165138
| 0.220183
| 0.330275
| 0.495413
| 0.495413
| 0.495413
| 0
| 0
| 0
| 0
| 0
| 0.060484
| 248
| 3
| 106
| 82.666667
| 0.935622
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7469eebdd1b10b1d2ecf28291f3dbd2919696c12
| 9,724
|
py
|
Python
|
axelrod/tests/unit/test_qlearner.py
|
t0nyt93/Axelroddd
|
66d95378d3ece8b32afeb1c77d305397bd9a815e
|
[
"MIT"
] | null | null | null |
axelrod/tests/unit/test_qlearner.py
|
t0nyt93/Axelroddd
|
66d95378d3ece8b32afeb1c77d305397bd9a815e
|
[
"MIT"
] | null | null | null |
axelrod/tests/unit/test_qlearner.py
|
t0nyt93/Axelroddd
|
66d95378d3ece8b32afeb1c77d305397bd9a815e
|
[
"MIT"
] | 1
|
2019-03-11T08:56:09.000Z
|
2019-03-11T08:56:09.000Z
|
"""Tests for the QLearner strategies."""
import random
import axelrod
from axelrod import simulate_play, Game
from .test_player import TestPlayer, test_responses
C, D = axelrod.Actions.C, axelrod.Actions.D
class TestRiskyQLearner(TestPlayer):
    """Tests for the Risky QLearner strategy."""
    name = 'Risky QLearner'
    player = axelrod.RiskyQLearner
    # Classifier dict the strategy is expected to advertise.
    expected_classifier = {
        'memory_depth': float('inf'),
        'stochastic': True,
        'makes_use_of': set(["game"]),
        'long_run_time': False,
        'inspects_source': False,
        'manipulates_source': False,
        'manipulates_state': False
    }
    def test_payoff_matrix(self):
        """The player's payoff matrix matches the game's RPST values."""
        (R, P, S, T) = Game().RPST()
        payoff_matrix = {C: {C: R, D: S}, D: {C: T, D: P}}
        p1 = self.player()
        self.assertEqual(p1.payoff_matrix, payoff_matrix)
    def test_qs_update(self):
        """Test that the q and v values update."""
        # Fixed seed makes the stochastic strategy's moves deterministic,
        # so the exact Q values below are reproducible.
        random.seed(5)
        p1 = axelrod.RiskyQLearner()
        p2 = axelrod.Cooperator()
        simulate_play(p1, p2)
        self.assertEqual(p1.Qs, {'': {C: 0, D: 0.9}, '0.0': {C: 0, D: 0}})
        simulate_play(p1, p2)
        self.assertEqual(p1.Qs,
                         {'': {C: 0, D: 0.9}, '0.0': {C: 2.7, D: 0},
                          'C1.0': {C: 0, D: 0}})
    def test_vs_update(self):
        """Test that the q and v values update."""
        random.seed(5)
        p1 = axelrod.RiskyQLearner()
        p2 = axelrod.Cooperator()
        simulate_play(p1, p2)
        self.assertEqual(p1.Vs, {'': 0.9, '0.0': 0})
        simulate_play(p1, p2)
        self.assertEqual(p1.Vs,{'': 0.9, '0.0': 2.7, 'C1.0': 0})
    def test_prev_state_updates(self):
        """Test that the q and v values update."""
        random.seed(5)
        p1 = axelrod.RiskyQLearner()
        p2 = axelrod.Cooperator()
        simulate_play(p1, p2)
        self.assertEqual(p1.prev_state, '0.0')
        simulate_play(p1, p2)
        self.assertEqual(p1.prev_state, 'C1.0')
    def test_strategy(self):
        """Tests that it chooses the best strategy."""
        random.seed(5)
        p1 = axelrod.RiskyQLearner()
        # Seed a Q table that strongly favours D in state 'CCDC'.
        p1.state = 'CCDC'
        p1.Qs = {'': {C: 0, D: 0}, 'CCDC': {C: 2, D: 6}}
        p2 = axelrod.Cooperator()
        test_responses(self, p1, p2, [C, D, C, C, D, C, C])
    def test_reset_method(self):
        """Test the reset method."""
        P1 = axelrod.RiskyQLearner()
        P1.Qs = {'': {C: 0, D: -0.9}, '0.0': {C: 0, D: 0}}
        P1.Vs = {'': 0, '0.0': 0}
        P1.history = [C, D, D, D]
        P1.prev_state = C
        P1.reset()
        # reset() must restore the learner's pristine tables.
        self.assertEqual(P1.prev_state, '')
        self.assertEqual(P1.Vs, {'': 0})
        self.assertEqual(P1.Qs, {'': {C: 0, D: 0}})
class TestArrogantQLearner(TestPlayer):
    """Tests for the Arrogant QLearner strategy."""
    name = 'Arrogant QLearner'
    player = axelrod.ArrogantQLearner
    # Classifier dict the strategy is expected to advertise.
    expected_classifier = {
        'memory_depth': float('inf'), # Long memory
        'stochastic': True,
        'makes_use_of': set(["game"]),
        'long_run_time': False,
        'inspects_source': False,
        'manipulates_source': False,
        'manipulates_state': False
    }
    def test_qs_update(self):
        """Test that the q and v values update."""
        # Fixed seed makes the stochastic strategy's moves deterministic.
        random.seed(5)
        p1 = axelrod.ArrogantQLearner()
        p2 = axelrod.Cooperator()
        # The returned plays are not needed; drop the unused unpacking.
        simulate_play(p1, p2)
        self.assertEqual(p1.Qs, {'': {C: 0, D: 0.9}, '0.0': {C: 0, D: 0}})
        simulate_play(p1, p2)
        self.assertEqual(p1.Qs, {'': {C: 0, D: 0.9}, '0.0': {C: 2.7, D: 0},
                                 'C1.0': {C: 0, D: 0}})
    def test_vs_update(self):
        """Test that the q and v values update."""
        random.seed(5)
        p1 = axelrod.ArrogantQLearner()
        p2 = axelrod.Cooperator()
        simulate_play(p1, p2)
        self.assertEqual(p1.Vs, {'': 0.9, '0.0': 0})
        simulate_play(p1, p2)
        self.assertEqual(p1.Vs, {'': 0.9, '0.0': 2.7, 'C1.0': 0})
    def test_prev_state_updates(self):
        """Test that the q and v values update."""
        random.seed(5)
        p1 = axelrod.ArrogantQLearner()
        p2 = axelrod.Cooperator()
        simulate_play(p1, p2)
        self.assertEqual(p1.prev_state, '0.0')
        simulate_play(p1, p2)
        self.assertEqual(p1.prev_state, 'C1.0')
    def test_strategy(self):
        """Tests that it chooses the best strategy."""
        random.seed(9)
        p1 = axelrod.ArrogantQLearner()
        # Seed a Q table that strongly favours D in state 'CCDC'.
        p1.state = 'CCDC'
        p1.Qs = {'': {C: 0, D: 0}, 'CCDC': {C: 2, D: 6}}
        p2 = axelrod.Cooperator()
        test_responses(self, p1, p2, [C, C, C, C, C, C, C])
    def test_reset_method(self):
        """Tests the reset method."""
        P1 = axelrod.ArrogantQLearner()
        P1.Qs = {'': {C: 0, D: -0.9}, '0.0': {C: 0, D: 0}}
        P1.Vs = {'': 0, '0.0': 0}
        P1.history = [C, D, D, D]
        P1.prev_state = C
        P1.reset()
        # reset() must restore the learner's pristine tables.
        self.assertEqual(P1.prev_state, '')
        self.assertEqual(P1.Vs, {'': 0})
        self.assertEqual(P1.Qs, {'': {C: 0, D: 0}})
class TestHesitantQLearner(TestPlayer):
    """Tests for the Hesitant QLearner strategy."""
    name = 'Hesitant QLearner'
    player = axelrod.HesitantQLearner
    # Classifier dict the strategy is expected to advertise.
    expected_classifier = {
        'memory_depth': float('inf'), # Long memory
        'stochastic': True,
        'makes_use_of': set(["game"]),
        'long_run_time': False,
        'inspects_source': False,
        'manipulates_source': False,
        'manipulates_state': False
    }
    def test_qs_update(self):
        """Test that the q and v values update."""
        # Fixed seed makes the stochastic strategy's moves deterministic.
        random.seed(5)
        p1 = axelrod.HesitantQLearner()
        p2 = axelrod.Cooperator()
        simulate_play(p1, p2)
        self.assertEqual(p1.Qs, {'': {C: 0, D: 0.1}, '0.0': {C: 0, D: 0}})
        simulate_play(p1, p2)
        # 0.30000000000000004 is the exact binary-float result of the
        # update arithmetic (0.1-based), not a typo.
        self.assertEqual(p1.Qs,{'': {C: 0, D: 0.1},
                                '0.0': {C: 0.30000000000000004, D: 0},
                                'C1.0': {C: 0, D: 0}})
    def test_vs_update(self):
        """Test that the q and v values update."""
        random.seed(5)
        p1 = axelrod.HesitantQLearner()
        p2 = axelrod.Cooperator()
        simulate_play(p1, p2)
        self.assertEqual(p1.Vs, {'': 0.1, '0.0': 0})
        simulate_play(p1, p2)
        self.assertEqual(p1.Vs,{'': 0.1, '0.0': 0.30000000000000004, 'C1.0': 0})
    def test_prev_state_updates(self):
        """Test that the q and v values update."""
        random.seed(5)
        p1 = axelrod.HesitantQLearner()
        p2 = axelrod.Cooperator()
        simulate_play(p1, p2)
        self.assertEqual(p1.prev_state, '0.0')
        simulate_play(p1, p2)
        self.assertEqual(p1.prev_state, 'C1.0')
    def test_strategy(self):
        """Tests that it chooses the best strategy."""
        random.seed(9)
        p1 = axelrod.HesitantQLearner()
        # Seed a Q table that strongly favours D in state 'CCDC'.
        p1.state = 'CCDC'
        p1.Qs = {'': {C: 0, D: 0}, 'CCDC': {C: 2, D: 6}}
        p2 = axelrod.Cooperator()
        test_responses(self, p1, p2, [C, C, C, C, C, C, C])
    def test_reset_method(self):
        """Tests the reset method."""
        P1 = axelrod.HesitantQLearner()
        P1.Qs = {'': {C: 0, D: -0.9}, '0.0': {C: 0, D: 0}}
        P1.Vs = {'': 0, '0.0': 0}
        P1.history = [C, D, D, D]
        P1.prev_state = C
        P1.reset()
        # reset() must restore the learner's pristine tables.
        self.assertEqual(P1.prev_state, '')
        self.assertEqual(P1.Vs, {'': 0})
        self.assertEqual(P1.Qs, {'': {C: 0, D: 0}})
class TestCautiousQLearner(TestPlayer):

    name = 'Cautious QLearner'
    player = axelrod.CautiousQLearner
    expected_classifier = {
        'memory_depth': float('inf'),  # Long memory
        'stochastic': True,
        'makes_use_of': set(["game"]),
        'long_run_time': False,
        'inspects_source': False,
        'manipulates_source': False,
        'manipulates_state': False
    }

    def test_qs_update(self):
        """Check the Q-table after one and after two simulated turns."""
        random.seed(5)
        player, opponent = axelrod.CautiousQLearner(), axelrod.Cooperator()
        simulate_play(player, opponent)
        self.assertEqual(player.Qs, {'': {C: 0, D: 0.1}, '0.0': {C: 0, D: 0}})
        simulate_play(player, opponent)
        expected = {'': {C: 0, D: 0.1},
                    '0.0': {C: 0.30000000000000004, D: 0},
                    'C1.0': {C: 0, D: 0.0}}
        self.assertEqual(player.Qs, expected)

    def test_vs_update(self):
        """Check the V-table after one and after two simulated turns."""
        random.seed(5)
        player, opponent = axelrod.CautiousQLearner(), axelrod.Cooperator()
        simulate_play(player, opponent)
        self.assertEqual(player.Vs, {'': 0.1, '0.0': 0})
        simulate_play(player, opponent)
        self.assertEqual(player.Vs,
                         {'': 0.1, '0.0': 0.30000000000000004, 'C1.0': 0})

    def test_prev_state_updates(self):
        """Check that prev_state advances with each simulated turn."""
        random.seed(5)
        player, opponent = axelrod.CautiousQLearner(), axelrod.Cooperator()
        simulate_play(player, opponent)
        self.assertEqual(player.prev_state, '0.0')
        simulate_play(player, opponent)
        self.assertEqual(player.prev_state, 'C1.0')

    def test_strategy(self):
        """Check that the player plays the highest-valued action."""
        random.seed(9)
        player = axelrod.CautiousQLearner()
        player.state = 'CCDC'
        player.Qs = {'': {C: 0, D: 0}, 'CCDC': {C: 2, D: 6}}
        opponent = axelrod.Cooperator()
        test_responses(self, player, opponent, [C] * 7)

    def test_reset_method(self):
        """Check that reset() restores the learner's initial tables."""
        player = axelrod.CautiousQLearner()
        player.Qs = {'': {C: 0, D: -0.9}, '0.0': {C: 0, D: 0}}
        player.Vs = {'': 0, '0.0': 0}
        player.history = [C, D, D, D]
        player.prev_state = C
        player.reset()
        self.assertEqual(player.prev_state, '')
        self.assertEqual(player.Vs, {'': 0})
        self.assertEqual(player.Qs, {'': {C: 0, D: 0}})
| 33.881533
| 80
| 0.530852
| 1,298
| 9,724
| 3.876733
| 0.072419
| 0.01868
| 0.125
| 0.025437
| 0.860095
| 0.855525
| 0.843601
| 0.838831
| 0.838831
| 0.838831
| 0
| 0.067595
| 0.298643
| 9,724
| 286
| 81
| 34
| 0.670235
| 0.079597
| 0
| 0.842795
| 0
| 0
| 0.072947
| 0
| 0
| 0
| 0
| 0
| 0.161572
| 1
| 0.091703
| false
| 0
| 0.017467
| 0
| 0.179039
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
749195c7a6af3229eeafbb2bf4df19cc7155054d
| 35,344
|
py
|
Python
|
sketch2image/log_wgan/models_crn_gan_enc_stack_gru_small.py
|
xuhuaren/GoogleAIWinterCamp-img2poem
|
4bb697b46f7ffa50dd45939acc9973632a3b84f8
|
[
"Apache-2.0"
] | 1
|
2019-01-17T11:02:40.000Z
|
2019-01-17T11:02:40.000Z
|
sketch2image/log_wgan/models_crn_gan_enc_stack_gru_small.py
|
xuhuaren/GoogleAIWinterCamp-img2poem
|
4bb697b46f7ffa50dd45939acc9973632a3b84f8
|
[
"Apache-2.0"
] | null | null | null |
sketch2image/log_wgan/models_crn_gan_enc_stack_gru_small.py
|
xuhuaren/GoogleAIWinterCamp-img2poem
|
4bb697b46f7ffa50dd45939acc9973632a3b84f8
|
[
"Apache-2.0"
] | null | null | null |
import functools
import numpy as np
import tensorflow as tf
import tensorflow.contrib.layers as ly
from tensorflow.python.framework import ops
from resnet_rnn import resnet_block, resnet_deconv_block, resnet_conv, resnet_deconv, upsample_conv, mean_pool, unrolled_lstm_conv, unrolled_lstm_deconv, unrolled_gru_conv, unrolled_gru_deconv
print('small')  # marks which model variant this module builds when imported

# Global architecture configuration shared by every network below.
USE_BOTTLENECK = False  # whether GRU conv/deconv units use bottleneck blocks
SIZE = 64               # base channel multiplier for all conv/deconv stacks
NUM_BLOCKS = 1          # number of stacked recurrent blocks per GRU unit
CRAMER = False          # if True, critics emit 256-dim outputs instead of 1
def one_hot_to_dense(labels):
    """Convert a one-hot label tensor of shape (batch, n) to dense indices.

    Assumes exactly one entry per row equals 1; returns a (batch,) tensor of
    the column index of that entry.
    """
    batch_size = int(labels.get_shape()[0])
    hot_positions = tf.where(tf.equal(labels, 1))[:, 1]
    return tf.reshape(hot_positions, (batch_size,))
def batchnorm(inputs, data_format=None, activation_fn=None, labels=None, n_labels=None):
    """Conditional batch normalization (Dumoulin et al. 2016) for NCHW maps.

    Each label gets its own per-channel offset and scale, looked up by
    embedding, applied after normalizing with batch statistics.
    """
    if data_format != 'NCHW':
        raise Exception('unsupported')
    mu, variance = tf.nn.moments(inputs, (0, 2, 3), keep_dims=True)
    num_channels = mu.get_shape().as_list()[1]  # moments shape is [1, n, 1, 1]
    offset_table = tf.get_variable(
        'offset', initializer=np.zeros([n_labels, num_channels], dtype='float32'))
    scale_table = tf.get_variable(
        'scale', initializer=np.ones([n_labels, num_channels], dtype='float32'))
    per_label_offset = tf.nn.embedding_lookup(offset_table, labels)
    per_label_scale = tf.nn.embedding_lookup(scale_table, labels)
    return tf.nn.batch_normalization(
        inputs, mu, variance,
        per_label_offset[:, :, None, None],
        per_label_scale[:, :, None, None],
        1e-5)
def lrelu(x, leak=0.3, name="lrelu"):
    """Leaky ReLU: identity for positive x, slope `leak` for negative x."""
    with tf.variable_scope(name):
        return tf.maximum(x, leak * x)
def prelu(x, name="prelu"):
    """Parametric ReLU with one learned negative-side slope per scope."""
    with tf.variable_scope(name):
        slope = tf.get_variable("param", shape=None, initializer=0.2,
                                regularizer=None, trainable=True,
                                caching_device=None)
        return tf.maximum(slope * x, x)
def miu_relu(x, miu=0.7, name="miu_relu"):
    """Smooth ReLU variant: (x + sqrt((1 - miu)^2 + x^2)) / 2."""
    with tf.variable_scope(name):
        return 0.5 * (x + tf.sqrt((1 - miu) ** 2 + x ** 2))
def p_miu_relu(x, name="p_miu_relu"):
    """miu_relu with `miu` as a learned scalar parameter per scope."""
    with tf.variable_scope(name):
        learned_miu = tf.get_variable("param_miu", shape=None, initializer=0.7,
                                      regularizer=None, trainable=True,
                                      caching_device=None)
        return 0.5 * (x + tf.sqrt((1 - learned_miu) ** 2 + x ** 2))
def matsushita_entropy(x, name="matsushita_entropy"):
    """Matsushita entropy activation: (1 + x / sqrt(1 + x^2)) / 2."""
    with tf.variable_scope(name):
        return 0.5 * (1 + x / tf.sqrt(1 + x ** 2))
def image_encoder_s1_gru(x, num_classes, reuse=False, data_format='NCHW', labels=None, scope_name=None):
    """Stage-1 image encoder built from unrolled convolutional GRU units.

    Downsamples `x` into a 5-level mean-pool pyramid, runs a stem conv on the
    coarsest level, then four strided GRU-conv units over progressively finer
    pyramid levels, collecting each unit's last hidden state.

    Returns:
        A list of the four GRU-unit outputs (coarsest unit first).

    NOTE(review): `reuse` and `scope_name` are currently unused — the
    variable_scope block below is commented out, so variables are created in
    the caller's scope. Confirm the caller opens the intended scope.
    """
    print("CONV_GRU")
    assert data_format == 'NCHW'
    size = SIZE
    num_blocks = NUM_BLOCKS
    resize_func = tf.image.resize_bilinear
    # Conditional batchnorm (the module-level `batchnorm`) takes the labels
    # through its params dict; plain batch/layer norm must not receive them.
    if normalizer_params_e is not None and normalizer_fn_e != ly.batch_norm and normalizer_fn_e != ly.layer_norm:
        normalizer_params_e['labels'] = labels
        normalizer_params_e['n_labels'] = num_classes
    if data_format == 'NCHW':
        # Build the pyramid [x, x/2, x/4, x/8, x/16], then reverse so the
        # coarsest resolution comes first.
        resized_x = []
        resized_ = x
        resized_x.append(resized_)
        for i in range(4):
            resized_ = mean_pool(resized_, data_format=data_format)
            resized_x.append(resized_)
        resized_x = resized_x[::-1]
    else:
        raise NotImplementedError
    output_list = []
    # with tf.variable_scope(scope_name) as scope:
    #     if reuse:
    #         scope.reuse_variables()
    x_list = resized_x
    # Stem convolution on the coarsest input; doubles as the first unit's
    # initial hidden state.
    h0 = ly.conv2d(x_list[-1], size * 1, kernel_size=7, stride=2, data_format=data_format,
                   activation_fn=activation_fn_e,
                   normalizer_fn=normalizer_fn_e,
                   normalizer_params=normalizer_params_e,
                   weights_initializer=weight_initializer)
    # Initial memory state
    hidden_state_shape = h0.get_shape().as_list()
    batch_size = hidden_state_shape[0]
    hidden_state_shape[0] = 1
    hts_0 = [h0]
    # Additional stacked blocks (num_blocks > 1) start from learned,
    # zero-initialized hidden states tiled across the batch.
    for i in range(1, num_blocks):
        h0 = tf.tile(tf.get_variable("initial_hidden_state_%d" % i, shape=hidden_state_shape, dtype=tf.float32,
                                     initializer=tf.zeros_initializer()), [batch_size, 1, 1, 1])
        hts_0.append(h0)
    # Four GRU-conv stages; channel width doubles (after stage 2) while the
    # spatial size halves at each stage (stride=2).
    hts_1 = unrolled_gru_conv(x_list[-2], hts_0,
                              size * 1, stride=2, dilate_rate=1,
                              data_format=data_format, num_blocks=num_blocks,
                              first_unit=True, last_unit=False,
                              activation_fn=activation_fn_e,
                              normalizer_fn=normalizer_fn_e,
                              normalizer_params=normalizer_params_e,
                              weights_initializer=weight_initializer,
                              use_bottleneck=USE_BOTTLENECK,
                              unit_num=1)
    output_list.append(hts_1[-1])
    hts_2 = unrolled_gru_conv(x_list[-3], hts_1,
                              size * 2, stride=2, dilate_rate=1,
                              data_format=data_format, num_blocks=num_blocks,
                              first_unit=False, last_unit=False,
                              activation_fn=activation_fn_e,
                              normalizer_fn=normalizer_fn_e,
                              normalizer_params=normalizer_params_e,
                              weights_initializer=weight_initializer,
                              use_bottleneck=USE_BOTTLENECK,
                              unit_num=2)
    output_list.append(hts_2[-1])
    hts_3 = unrolled_gru_conv(x_list[-4], hts_2,
                              size * 4, stride=2, dilate_rate=1,
                              data_format=data_format, num_blocks=num_blocks,
                              first_unit=False, last_unit=False,
                              activation_fn=activation_fn_e,
                              normalizer_fn=normalizer_fn_e,
                              normalizer_params=normalizer_params_e,
                              weights_initializer=weight_initializer,
                              use_bottleneck=USE_BOTTLENECK,
                              unit_num=3)
    output_list.append(hts_3[-1])
    hts_4 = unrolled_gru_conv(x_list[-5], hts_3,
                              size * 8, stride=2, dilate_rate=1,
                              data_format=data_format, num_blocks=num_blocks,
                              first_unit=False, last_unit=True,
                              activation_fn=activation_fn_e,
                              normalizer_fn=normalizer_fn_e,
                              normalizer_params=normalizer_params_e,
                              weights_initializer=weight_initializer,
                              use_bottleneck=USE_BOTTLENECK,
                              unit_num=4)
    output_list.append(hts_4[-1])
    return output_list
# GRU
def generator_l_s1_skip(z, output_channel, num_classes, reuse=False, data_format='NCHW',
                        labels=None, scope_name=None):
    """Stage-1 generator with encoder skip connections and GRU deconv units.

    Encodes `z` with `image_encoder_s1_gru`, then upsamples back through five
    GRU-deconv units. At each level the input is a concat of an area-resized
    copy of `z` and the matching encoder feature map (skip connection), plus
    a fully-connected noise tensor at the coarsest level.

    Returns:
        A single-element list holding the final tanh image (3 channels).

    NOTE(review): `output_channel` is unused — the final conv hardcodes 3
    output channels; confirm callers rely on 3-channel output.
    """
    print("DECONV_GRU")
    size = SIZE
    num_blocks = NUM_BLOCKS
    input_dims = z.get_shape().as_list()
    resize_func = tf.image.resize_area
    if data_format == 'NCHW':
        height = input_dims[2]
        width = input_dims[3]
        # resize ops expect NHWC, so transpose around each resize.
        z_orig = tf.identity(z)
        z = tf.transpose(z, [0, 2, 3, 1])
        resized_z = [
            tf.transpose(resize_func(z, [int(height / 32), int(width / 32)]), [0, 3, 1, 2]),
            tf.transpose(resize_func(z, [int(height / 16), int(width / 16)]), [0, 3, 1, 2]),
            tf.transpose(resize_func(z, [int(height / 8), int(width / 8)]), [0, 3, 1, 2]),
            tf.transpose(resize_func(z, [int(height / 4), int(width / 4)]), [0, 3, 1, 2]),
            tf.transpose(resize_func(z, [int(height / 2), int(width / 2)]), [0, 3, 1, 2]),
        ]
        z = z_orig
    else:
        height = input_dims[1]
        width = input_dims[2]
        resized_z = [
            resize_func(z, [int(height / 32), int(width / 32)]),
            resize_func(z, [int(height / 16), int(width / 16)]),
            resize_func(z, [int(height / 8), int(width / 8)]),
            resize_func(z, [int(height / 4), int(width / 4)]),
            resize_func(z, [int(height / 2), int(width / 2)]),
        ]
    if data_format == 'NCHW':
        concat_axis = 1
    else:
        concat_axis = 3
    output_list = []
    # Conditional batchnorm needs labels routed through its params dict.
    if normalizer_params_g is not None and normalizer_fn_g != ly.batch_norm and normalizer_fn_g != ly.layer_norm:
        normalizer_params_g['labels'] = labels
        normalizer_params_g['n_labels'] = num_classes
    with tf.variable_scope(scope_name) as scope:
        if reuse:
            scope.reuse_variables()
        z_encoded = image_encoder_s1_gru(z, num_classes=num_classes, reuse=reuse, data_format=data_format,
                                         labels=labels, scope_name=scope_name)
        # Noise occupies half the encoder-output channels at the bottleneck.
        input_e_dims = z_encoded[-1].get_shape().as_list()
        input_e_dims[concat_axis] = int(input_e_dims[concat_axis] / 2.)
        noise = tf.random_normal(shape=(input_e_dims[0], 256), dtype=tf.float32)
        noise = ly.fully_connected(noise, int(np.prod(input_e_dims[1:])), activation_fn=activation_fn_g)
        noise = tf.reshape(noise, shape=input_e_dims)
        # Initial memory state
        hidden_state_shape = z_encoded[-1].get_shape().as_list()
        batch_size = hidden_state_shape[0]
        hidden_state_shape[0] = 1
        hts_0 = [z_encoded[-1]]
        # Extra stacked blocks start from learned random-normal states.
        for i in range(1, num_blocks):
            h0 = tf.tile(tf.get_variable("initial_hidden_state_%d" % i, shape=hidden_state_shape, dtype=tf.float32,
                                         initializer=tf.random_normal_initializer()), [batch_size, 1, 1, 1])
            hts_0.append(h0)
        input_0 = tf.concat([resized_z[0], noise], axis=concat_axis)
        hts_1 = unrolled_gru_deconv(input_0, hts_0,
                                    size * 6, stride=2, data_format=data_format, num_blocks=num_blocks,
                                    first_unit=True, last_unit=False,
                                    activation_fn=activation_fn_g,
                                    normalizer_fn=normalizer_fn_g,
                                    normalizer_params=normalizer_params_g,
                                    weights_initializer=weight_initializer,
                                    use_bottleneck=USE_BOTTLENECK,
                                    unit_num=0)
        # output_list.append(ly.conv2d(hts_1[-1], 3, 3, stride=1, data_format=data_format,
        #                              normalizer_fn=None, activation_fn=tf.nn.tanh,
        #                              weights_initializer=weight_initializer))
        # Skip connections: concat resized z with encoder features, finest last.
        input_1 = tf.concat([resized_z[1], z_encoded[-2]], axis=concat_axis)
        hts_2 = unrolled_gru_deconv(input_1, hts_1,
                                    size * 4, stride=2, data_format=data_format, num_blocks=num_blocks,
                                    first_unit=False, last_unit=False,
                                    activation_fn=activation_fn_g,
                                    normalizer_fn=normalizer_fn_g,
                                    normalizer_params=normalizer_params_g,
                                    weights_initializer=weight_initializer,
                                    use_bottleneck=USE_BOTTLENECK,
                                    unit_num=2)
        # output_list.append(ly.conv2d(hts_2[-1], 3, 3, stride=1, data_format=data_format,
        #                              normalizer_fn=None, activation_fn=tf.nn.tanh,
        #                              weights_initializer=weight_initializer))
        input_2 = tf.concat([resized_z[2], z_encoded[-3]], axis=concat_axis)
        hts_3 = unrolled_gru_deconv(input_2, hts_2,
                                    size * 2, stride=2, data_format=data_format, num_blocks=num_blocks,
                                    first_unit=False, last_unit=False,
                                    activation_fn=activation_fn_g,
                                    normalizer_fn=normalizer_fn_g,
                                    normalizer_params=normalizer_params_g,
                                    weights_initializer=weight_initializer,
                                    use_bottleneck=USE_BOTTLENECK,
                                    unit_num=4)
        # output_list.append(ly.conv2d(hts_3[-1], 3, 3, stride=1, data_format=data_format,
        #                              normalizer_fn=None, activation_fn=tf.nn.tanh,
        #                              weights_initializer=weight_initializer))
        input_3 = tf.concat([resized_z[3], z_encoded[-4]], axis=concat_axis)
        hts_4 = unrolled_gru_deconv(input_3, hts_3,
                                    size * 2, stride=2, data_format=data_format, num_blocks=num_blocks,
                                    first_unit=False, last_unit=False,
                                    activation_fn=activation_fn_g,
                                    normalizer_fn=normalizer_fn_g,
                                    normalizer_params=normalizer_params_g,
                                    weights_initializer=weight_initializer,
                                    use_bottleneck=USE_BOTTLENECK,
                                    unit_num=6)
        # output_list.append(ly.conv2d(hts_4[-1], 3, 3, stride=1, data_format=data_format,
        #                              normalizer_fn=None, activation_fn=tf.nn.tanh,
        #                              weights_initializer=weight_initializer))
        hts_5 = unrolled_gru_deconv(resized_z[4], hts_4,
                                    size * 1, stride=2, data_format=data_format, num_blocks=num_blocks,
                                    first_unit=False, last_unit=True,
                                    activation_fn=activation_fn_g,
                                    normalizer_fn=normalizer_fn_g,
                                    normalizer_params=normalizer_params_g,
                                    weights_initializer=weight_initializer,
                                    use_bottleneck=USE_BOTTLENECK,
                                    unit_num=8)
        # Final image head: 3-channel tanh conv.
        output_list.append(ly.conv2d(hts_5[-1], 3, 7, stride=1, data_format=data_format,
                                     normalizer_fn=None, activation_fn=tf.nn.tanh,
                                     weights_initializer=weight_initializer))
        # out = ly.conv2d(train, output_channel, 7, stride=1, data_format=data_format,
        #                 activation_fn=tf.nn.tanh, weights_initializer=weight_initializer)
        # Stage-1 output is expected to be 64x64.
        assert output_list[-1].get_shape().as_list()[2] == 64
        return output_list
# GRU
def generator_l_s2(z, extra, output_channel, num_classes, reuse=False, data_format='NCHW',
                   labels=None, scope_name=None):
    """Stage-2 generator: refines stage-1 output `z` conditioned on `extra`.

    Encodes `z` with `image_encoder_s2` (defined elsewhere in this file),
    then upsamples through five GRU-deconv units, feeding each unit an
    area-resized copy of `extra` at the matching resolution.

    Returns:
        A single-element list holding the final tanh image (3 channels).

    NOTE(review): `output_channel` is unused — the final conv hardcodes 3
    output channels; confirm callers rely on 3-channel output.
    """
    print("DECONV_GRU")
    size = SIZE
    num_blocks = NUM_BLOCKS
    # Callers may pass the stage-1 output list; use its finest image.
    if type(z) is list:
        z = z[-1]
    input_dims = extra.get_shape().as_list()
    resize_func = tf.image.resize_area
    if data_format == 'NCHW':
        height = input_dims[2]
        width = input_dims[3]
        # resize ops expect NHWC, so transpose around each resize.
        extra_orig = tf.identity(extra)
        extra = tf.transpose(extra, [0, 2, 3, 1])
        resized_extra = [
            tf.transpose(resize_func(extra, [int(height / 32), int(width / 32)]), [0, 3, 1, 2]),
            tf.transpose(resize_func(extra, [int(height / 16), int(width / 16)]), [0, 3, 1, 2]),
            tf.transpose(resize_func(extra, [int(height / 8), int(width / 8)]), [0, 3, 1, 2]),
            tf.transpose(resize_func(extra, [int(height / 4), int(width / 4)]), [0, 3, 1, 2]),
            tf.transpose(resize_func(extra, [int(height / 2), int(width / 2)]), [0, 3, 1, 2]),
        ]
        extra = extra_orig
    else:
        # Only NCHW is supported. The NHWC resize code that used to follow
        # this raise was unreachable dead code and has been removed.
        raise NotImplementedError
    if data_format == 'NCHW':
        concat_axis = 1
    else:
        concat_axis = 3
    output_list = []
    # Conditional batchnorm needs labels routed through its params dict.
    if normalizer_params_g is not None and normalizer_fn_g != ly.batch_norm and normalizer_fn_g != ly.layer_norm:
        normalizer_params_g['labels'] = labels
        normalizer_params_g['n_labels'] = num_classes
    with tf.variable_scope(scope_name) as scope:
        if reuse:
            scope.reuse_variables()
        z_encoded = image_encoder_s2(z, num_classes=num_classes, reuse=reuse, data_format=data_format,
                                     labels=labels, scope_name=scope_name)
        # Initial memory state
        hidden_state_shape = z_encoded.get_shape().as_list()
        batch_size = hidden_state_shape[0]
        hidden_state_shape[0] = 1
        hts_0 = [z_encoded]
        # Extra stacked blocks start from learned random-normal states.
        for i in range(1, num_blocks):
            h0 = tf.tile(tf.get_variable("initial_hidden_state_%d" % i, shape=hidden_state_shape, dtype=tf.float32,
                                         initializer=tf.random_normal_initializer()), [batch_size, 1, 1, 1])
            hts_0.append(h0)
        hts_1 = unrolled_gru_deconv(resized_extra[0], hts_0,
                                    size * 8, stride=2, data_format=data_format, num_blocks=num_blocks,
                                    first_unit=True, last_unit=False,
                                    activation_fn=activation_fn_g,
                                    normalizer_fn=normalizer_fn_g,
                                    normalizer_params=normalizer_params_g,
                                    weights_initializer=weight_initializer,
                                    use_bottleneck=USE_BOTTLENECK,
                                    unit_num=0)
        hts_1 = unrolled_gru_deconv(resized_extra[1], hts_1,
                                    size * 8, stride=2, data_format=data_format, num_blocks=num_blocks,
                                    first_unit=False, last_unit=False,
                                    activation_fn=activation_fn_g,
                                    normalizer_fn=normalizer_fn_g,
                                    normalizer_params=normalizer_params_g,
                                    weights_initializer=weight_initializer,
                                    use_bottleneck=USE_BOTTLENECK,
                                    unit_num=2)
        hts_2 = unrolled_gru_deconv(resized_extra[2], hts_1,
                                    size * 4, stride=2, data_format=data_format, num_blocks=num_blocks,
                                    first_unit=False, last_unit=False,
                                    activation_fn=activation_fn_g,
                                    normalizer_fn=normalizer_fn_g,
                                    normalizer_params=normalizer_params_g,
                                    weights_initializer=weight_initializer,
                                    use_bottleneck=USE_BOTTLENECK,
                                    unit_num=11)
        hts_3 = unrolled_gru_deconv(resized_extra[3], hts_2,
                                    size * 2, stride=2, data_format=data_format, num_blocks=num_blocks,
                                    first_unit=False, last_unit=False,
                                    activation_fn=activation_fn_g,
                                    normalizer_fn=normalizer_fn_g,
                                    normalizer_params=normalizer_params_g,
                                    weights_initializer=weight_initializer,
                                    use_bottleneck=USE_BOTTLENECK,
                                    unit_num=12)
        hts_4 = unrolled_gru_deconv(resized_extra[4], hts_3,
                                    size * 1, stride=2, data_format=data_format, num_blocks=num_blocks,
                                    first_unit=False, last_unit=True,
                                    activation_fn=activation_fn_g,
                                    normalizer_fn=normalizer_fn_g,
                                    normalizer_params=normalizer_params_g,
                                    weights_initializer=weight_initializer,
                                    use_bottleneck=USE_BOTTLENECK,
                                    unit_num=13)
        # Final image head: 3-channel tanh conv.
        output_list.append(ly.conv2d(hts_4[-1], 3, 7, stride=1, data_format=data_format,
                                     normalizer_fn=None, activation_fn=tf.nn.tanh,
                                     weights_initializer=weight_initializer))
        print("G_s2 out: %d" % output_list[-1].get_shape().as_list()[2])
        return output_list
# GRU
def critic_l_multiple_s1(x, num_classes, reuse=False, data_format='NCHW', scope_name=None, cramer=CRAMER):
    """Stage-1 critic with an auxiliary classification head (GRU-conv stack).

    Builds a mean-pool pyramid of `x`, runs a stem conv plus four strided
    GRU-conv units, then splits into a 1x1-conv discriminator map and a
    global-average-pooled classification head.

    Returns:
        (disc, logits): the critic output map (256 channels when `cramer`,
        else 1) and per-class logits of shape (batch, num_classes).
    """
    print("CONV_GRU")
    assert data_format == 'NCHW'
    size = SIZE
    num_blocks = NUM_BLOCKS
    resize_func = tf.image.resize_bilinear  # NOTE(review): unused here
    if data_format == 'NCHW':
        concat_axis = 1
    else:
        concat_axis = 3
    # Callers may pass the generator's output list; use its finest image.
    if type(x) is list:
        x = x[-1]
    # if cond is not None:
    #     x = tf.concat([x, cond], axis=concat_axis)
    if data_format == 'NCHW':
        # Pyramid [x, x/2, x/4, x/8, x/16], reversed to coarse-first.
        resized_x = []
        resized_ = x
        resized_x.append(resized_)
        for i in range(4):
            resized_ = mean_pool(resized_, data_format=data_format)
            resized_x.append(resized_)
        resized_x = resized_x[::-1]
    else:
        raise NotImplementedError
    output_list = []
    output_dim = 256 if cramer else 1
    with tf.variable_scope(scope_name) as scope:
        if reuse:
            scope.reuse_variables()
        x_list = resized_x
        # Stem conv on the finest input (x_list[-1] is the full-resolution
        # image after the reversal above); its output seeds the hidden state.
        h0 = ly.conv2d(x_list[-1], 6, kernel_size=7, stride=1, data_format=data_format,
                       activation_fn=activation_fn_d,
                       normalizer_fn=normalizer_fn_d,
                       normalizer_params=normalizer_params_d,
                       weights_initializer=weight_initializer)
        # Initial memory state
        hidden_state_shape = h0.get_shape().as_list()
        batch_size = hidden_state_shape[0]
        hidden_state_shape[0] = 1
        hts_0 = [h0]
        # Additional stacked blocks start from learned zero states.
        for i in range(1, num_blocks):
            h0 = tf.tile(tf.get_variable("initial_hidden_state_%d" % i, shape=hidden_state_shape, dtype=tf.float32,
                                         initializer=tf.zeros_initializer()), [batch_size, 1, 1, 1])
            hts_0.append(h0)
        # Four strided GRU-conv stages, doubling channels each stage.
        hts_1 = unrolled_gru_conv(x_list[-1], hts_0,
                                  size * 2, stride=2, dilate_rate=1,
                                  data_format=data_format, num_blocks=num_blocks,
                                  first_unit=True, last_unit=False,
                                  activation_fn=activation_fn_d,
                                  normalizer_fn=normalizer_fn_d,
                                  normalizer_params=normalizer_params_d,
                                  weights_initializer=weight_initializer,
                                  use_bottleneck=USE_BOTTLENECK,
                                  unit_num=1)
        hts_2 = unrolled_gru_conv(x_list[-2], hts_1,
                                  size * 4, stride=2, dilate_rate=1,
                                  data_format=data_format, num_blocks=num_blocks,
                                  first_unit=False, last_unit=False,
                                  activation_fn=activation_fn_d,
                                  normalizer_fn=normalizer_fn_d,
                                  normalizer_params=normalizer_params_d,
                                  weights_initializer=weight_initializer,
                                  use_bottleneck=USE_BOTTLENECK,
                                  unit_num=2)
        hts_3 = unrolled_gru_conv(x_list[-3], hts_2,
                                  size * 8, stride=2, dilate_rate=1,
                                  data_format=data_format, num_blocks=num_blocks,
                                  first_unit=False, last_unit=False,
                                  activation_fn=activation_fn_d,
                                  normalizer_fn=normalizer_fn_d,
                                  normalizer_params=normalizer_params_d,
                                  weights_initializer=weight_initializer,
                                  use_bottleneck=USE_BOTTLENECK,
                                  unit_num=3)
        hts_4 = unrolled_gru_conv(x_list[-4], hts_3,
                                  size * 16, stride=2, dilate_rate=1,
                                  data_format=data_format, num_blocks=num_blocks,
                                  first_unit=False, last_unit=True,
                                  activation_fn=activation_fn_d,
                                  normalizer_fn=normalizer_fn_d,
                                  normalizer_params=normalizer_params_d,
                                  weights_initializer=weight_initializer,
                                  use_bottleneck=USE_BOTTLENECK,
                                  unit_num=4)
        img = hts_4[-1]
        # discriminator end
        disc = ly.conv2d(img, output_dim, kernel_size=1, stride=1, data_format=data_format,
                         activation_fn=None, normalizer_fn=None,
                         weights_initializer=weight_initializer)
        # classification end
        img = tf.reduce_mean(img, axis=(2, 3) if data_format == 'NCHW' else (1, 2))
        logits = ly.fully_connected(img, num_classes, activation_fn=None, normalizer_fn=None)
        return disc, logits
# GRU
def critic_l_multiple_s2(x, num_classes, reuse=False, data_format='NCHW', scope_name=None, cramer=CRAMER):
    """Stage-2 critic with an auxiliary classification head (GRU-conv stack).

    Like `critic_l_multiple_s1` but for the higher-resolution stage-2 images:
    a deeper mean-pool pyramid (an extra initial pool plus six levels) and
    five strided GRU-conv units.

    Returns:
        (disc, logits): the critic output map (256 channels when `cramer`,
        else 1) and per-class logits of shape (batch, num_classes).
    """
    print("CONV_GRU")
    assert data_format == 'NCHW'
    size = SIZE
    num_blocks = NUM_BLOCKS
    resize_func = tf.image.resize_bilinear  # NOTE(review): unused here
    if data_format == 'NCHW':
        concat_axis = 1
    else:
        concat_axis = 3
    # Callers may pass the generator's output list; use its finest image.
    if type(x) is list:
        x = x[-1]
    # if cond is not None:
    #     x = tf.concat([x, cond], axis=concat_axis)
    if data_format == 'NCHW':
        # One initial pool, then a 7-level pyramid, reversed to coarse-first.
        resized_x = []
        resized_ = x
        resized_x.append(resized_)
        resized_ = mean_pool(resized_, data_format=data_format)
        for i in range(6):
            resized_ = mean_pool(resized_, data_format=data_format)
            resized_x.append(resized_)
        resized_x = resized_x[::-1]
    else:
        raise NotImplementedError
    output_list = []
    output_dim = 256 if cramer else 1
    with tf.variable_scope(scope_name) as scope:
        if reuse:
            scope.reuse_variables()
        x_list = resized_x
        # Stem conv; its output seeds the first hidden state.
        # NOTE(review): `inp_0` below applies a second conv with identical
        # arguments to the same input (separate variables under TF's
        # auto-uniquified scopes) — one feeds the GRU input, this one the
        # hidden state. Looks intentional but worth confirming.
        h0 = ly.conv2d(x_list[-1], 6, kernel_size=7, stride=2, data_format=data_format,
                       activation_fn=activation_fn_d,
                       normalizer_fn=normalizer_fn_d,
                       normalizer_params=normalizer_params_d,
                       weights_initializer=weight_initializer)
        # Initial memory state
        hidden_state_shape = h0.get_shape().as_list()
        batch_size = hidden_state_shape[0]
        hidden_state_shape[0] = 1
        hts_0 = [h0]
        # Additional stacked blocks start from learned zero states.
        for i in range(1, num_blocks):
            h0 = tf.tile(tf.get_variable("initial_hidden_state_%d" % i, shape=hidden_state_shape, dtype=tf.float32,
                                         initializer=tf.zeros_initializer()), [batch_size, 1, 1, 1])
            hts_0.append(h0)
        inp_0 = ly.conv2d(x_list[-1], 6, kernel_size=7, stride=2, data_format=data_format,
                          activation_fn=activation_fn_d,
                          normalizer_fn=normalizer_fn_d,
                          normalizer_params=normalizer_params_d,
                          weights_initializer=weight_initializer)
        # Five strided GRU-conv stages, doubling channels each stage.
        hts_1 = unrolled_gru_conv(inp_0, hts_0,
                                  size * 1, stride=2, dilate_rate=1,
                                  data_format=data_format, num_blocks=num_blocks,
                                  first_unit=True, last_unit=False,
                                  activation_fn=activation_fn_d,
                                  normalizer_fn=normalizer_fn_d,
                                  normalizer_params=normalizer_params_d,
                                  weights_initializer=weight_initializer,
                                  use_bottleneck=USE_BOTTLENECK,
                                  unit_num=1)
        hts_2 = unrolled_gru_conv(x_list[-2], hts_1,
                                  size * 2, stride=2, dilate_rate=1,
                                  data_format=data_format, num_blocks=num_blocks,
                                  first_unit=False, last_unit=False,
                                  activation_fn=activation_fn_d,
                                  normalizer_fn=normalizer_fn_d,
                                  normalizer_params=normalizer_params_d,
                                  weights_initializer=weight_initializer,
                                  use_bottleneck=USE_BOTTLENECK,
                                  unit_num=2)
        hts_3 = unrolled_gru_conv(x_list[-3], hts_2,
                                  size * 4, stride=2, dilate_rate=1,
                                  data_format=data_format, num_blocks=num_blocks,
                                  first_unit=False, last_unit=False,
                                  activation_fn=activation_fn_d,
                                  normalizer_fn=normalizer_fn_d,
                                  normalizer_params=normalizer_params_d,
                                  weights_initializer=weight_initializer,
                                  use_bottleneck=USE_BOTTLENECK,
                                  unit_num=3)
        hts_4 = unrolled_gru_conv(x_list[-4], hts_3,
                                  size * 8, stride=2, dilate_rate=1,
                                  data_format=data_format, num_blocks=num_blocks,
                                  first_unit=False, last_unit=False,
                                  activation_fn=activation_fn_d,
                                  normalizer_fn=normalizer_fn_d,
                                  normalizer_params=normalizer_params_d,
                                  weights_initializer=weight_initializer,
                                  use_bottleneck=USE_BOTTLENECK,
                                  unit_num=4)
        hts_5 = unrolled_gru_conv(x_list[-5], hts_4,
                                  size * 16, stride=2, dilate_rate=1,
                                  data_format=data_format, num_blocks=num_blocks,
                                  first_unit=False, last_unit=False,
                                  activation_fn=activation_fn_d,
                                  normalizer_fn=normalizer_fn_d,
                                  normalizer_params=normalizer_params_d,
                                  weights_initializer=weight_initializer,
                                  use_bottleneck=USE_BOTTLENECK,
                                  unit_num=5)
        # hts_6 = unrolled_gru_conv(x_list[-6], hts_5,
        #                           size * 16, stride=2, dilate_rate=1,
        #                           data_format=data_format, num_blocks=num_blocks,
        #                           first_unit=False, last_unit=True,
        #                           activation_fn=activation_fn_d,
        #                           normalizer_fn=normalizer_fn_d,
        #                           normalizer_params=normalizer_params_d,
        #                           weights_initializer=weight_initializer,
        #                           use_bottleneck=USE_BOTTLENECK,
        #                           unit_num=6)
        img = hts_5[-1]
        # img = tf.concat(output_list, axis=concat_axis)
        # img = tf.add_n(
        #     [img[:, :, ::2, ::2], img[:, :, 1::2, ::2], img[:, :, ::2, 1::2], img[:, :, 1::2, 1::2]]) / 4.
        # discriminator end
        disc = ly.conv2d(img, output_dim, kernel_size=1, stride=1, data_format=data_format,
                         activation_fn=None, normalizer_fn=None,
                         weights_initializer=weight_initializer)
        # classification end
        img = tf.reduce_mean(img, axis=(2, 3) if data_format == 'NCHW' else (1, 2))
        logits = ly.fully_connected(img, num_classes, activation_fn=None, normalizer_fn=None)
        return disc, logits
# Shared weight initializer for every conv/deconv/fc layer in this module
# (DCGAN-style N(0, 0.02) initialization).
weight_initializer = tf.random_normal_initializer(0, 0.02)
# weight_initializer = ly.xavier_initializer_conv2d()
def set_param(data_format='NCHW'):
    """Configure the module-level normalizer globals for the given layout.

    Mutates module state: stores `data_format` and switches the encoder and
    generator to the conditional `batchnorm` defined above, while leaving the
    discriminator/critic and 'ce' normalizers disabled. Overrides the
    module-level defaults assigned below.
    """
    global model_data_format, normalizer_fn_e, normalizer_fn_g, normalizer_fn_d, normalizer_fn_ce,\
        normalizer_params_e, normalizer_params_g, normalizer_params_d, normalizer_params_ce
    model_data_format = data_format
    # normalizer_fn_e = ly.batch_norm
    # normalizer_params_e = {'fused': True, 'data_format': model_data_format,
    #                        'is_training': True}
    normalizer_fn_e = batchnorm
    normalizer_params_e = {'data_format': model_data_format}
    normalizer_fn_g = batchnorm
    normalizer_params_g = {'data_format': model_data_format}
    # normalizer_fn_e = None
    # normalizer_params_e = None
    # normalizer_fn_g = None
    # normalizer_params_g = None
    # normalizer_fn_g = ly.layer_norm
    # normalizer_params_g = None
    normalizer_fn_d = None
    normalizer_params_d = None
    normalizer_fn_ce = None
    normalizer_params_ce = None
# Module-level defaults, used unless/until set_param() is called.
# Note: model_data_format is None here, so the batch_norm params capture
# data_format=None at import time.
model_data_format = None
normalizer_fn_e = ly.batch_norm
normalizer_params_e = {'fused': True, 'data_format': model_data_format,
                       'is_training': True}
# normalizer_params_e = {'fused': True, 'data_format': model_data_format,
#                        'is_training': True, 'decay': 0.95}
normalizer_fn_g = ly.batch_norm
normalizer_params_g = {'fused': True, 'data_format': model_data_format,
                       'is_training': True}
# normalizer_params_g = {'fused': True, 'data_format': model_data_format,
#                        'is_training': True, 'decay': 0.95}
# Critic and 'ce' networks run without normalization.
normalizer_fn_d = None
normalizer_params_d = None
normalizer_fn_ce = None
normalizer_params_ce = None
# Activation choices per network: smooth miu_relu for encoder/generator,
# parametric ReLU for the critic.
activation_fn_e = miu_relu
activation_fn_g = miu_relu
activation_fn_d = prelu
print('prelu')
activation_fn_d_last = None
# activation_fn_d_last = None
# activation_fn_ce = prelu
# Public aliases consumed by the training script.
generator_s1 = generator_l_s1_skip
generator_s2 = generator_l_s2
critic_s1 = critic_l_multiple_s1
critic_s2 = critic_l_multiple_s2
# critic_e = critic_e_fc
| 48.087075
| 192
| 0.541025
| 4,000
| 35,344
| 4.42825
| 0.056
| 0.072263
| 0.035567
| 0.05081
| 0.855643
| 0.829447
| 0.811325
| 0.786089
| 0.769153
| 0.733473
| 0
| 0.026419
| 0.372425
| 35,344
| 734
| 193
| 48.152589
| 0.772147
| 0.107373
| 0
| 0.732143
| 0
| 0
| 0.015223
| 0.003655
| 0
| 0
| 0
| 0
| 0.007143
| 1
| 0.023214
| false
| 0
| 0.010714
| 0
| 0.055357
| 0.014286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7776110e7b7bc87f31f8e07f398e74c9f29c68a4
| 447,562
|
py
|
Python
|
referencesrv/tests/unittests/stubdata/dataXML.py
|
romanchyla/reference_service
|
f192c4627750258d25776617d8acd09fe0a8cead
|
[
"MIT"
] | null | null | null |
referencesrv/tests/unittests/stubdata/dataXML.py
|
romanchyla/reference_service
|
f192c4627750258d25776617d8acd09fe0a8cead
|
[
"MIT"
] | null | null | null |
referencesrv/tests/unittests/stubdata/dataXML.py
|
romanchyla/reference_service
|
f192c4627750258d25776617d8acd09fe0a8cead
|
[
"MIT"
] | null | null | null |
train_1 = [[[('AUTHOR_FIRST_NAME', u'J.D.'), ('AUTHOR_LAST_NAME', u'Adams'), ('AUTHOR_FIRST_NAME', u'T.L.'), ('AUTHOR_LAST_NAME', u'Herter'), ('AUTHOR_FIRST_NAME', u'G.E.'), ('AUTHOR_LAST_NAME', u'Gull'), ('AUTHOR_FIRST_NAME', u'J.'), ('AUTHOR_LAST_NAME', u'Shoenwald'), ('AUTHOR_FIRST_NAME', u'C.P.'), ('AUTHOR_LAST_NAME', u'Henderson'), ('AUTHOR_FIRST_NAME', u'L.D.'), ('AUTHOR_LAST_NAME', u'Keller'), ('AUTHOR_FIRST_NAME', u'J.M.'), ('AUTHOR_LAST_NAME', u'DeBuizer'), ('AUTHOR_FIRST_NAME', u'G.J.'), ('AUTHOR_LAST_NAME', u'Stacey'), ('AUTHOR_FIRST_NAME', u'T.'), ('AUTHOR_LAST_NAME', u'Nikola'), ('TITLE', u'FORCAST:'), ('TITLE', u'A'), ('TITLE', u'first'), ('TITLE', u'light'), ('TITLE', u'facility'), ('TITLE', u'instrument'), ('TITLE', u'for'), ('TITLE', u'SOFIA:'), ('JOURNAL', u'Proc.'), ('JOURNAL', u'SPIE'), ('VOLUME', u'7735'), ('YEAR', u'2010'), ('PAGE', u'eid7735 1U')],
[('AUTHOR_FIRST_NAME', u'M.'), ('AUTHOR_LAST_NAME', u'Bertero'), ('AUTHOR_FIRST_NAME', u'P.'), ('AUTHOR_LAST_NAME', u'Boccacci'), ('TITLE', u'Introduction'), ('TITLE', u'to'), ('TITLE', u'Inverse'), ('TITLE', u'Problems'), ('TITLE', u'in'), ('TITLE', u'Imaging:'), ('PUBLISHER', u'CRC'), ('PUBLISHER', u'Press'), ('YEAR', u'1998'), ('PAGE', u'352')],
[('AUTHOR_FIRST_NAME', u'B.J.'), ('AUTHOR_LAST_NAME', u'Conrath'), ('AUTHOR_FIRST_NAME', u'P.J.'), ('AUTHOR_LAST_NAME', u'Gierasch'), ('AUTHOR_FIRST_NAME', u'E.A.'), ('AUTHOR_LAST_NAME', u'Ustinov'), ('TITLE', u'Thermal'), ('TITLE', u'structure'), ('TITLE', u'and'), ('TITLE', u'para'), ('TITLE', u'hydrogen'), ('TITLE', u'fraction'), ('TITLE', u'on'), ('TITLE', u'the'), ('TITLE', u'outer'), ('TITLE', u'planets'), ('TITLE', u'from'), ('TITLE', u'Voyager'), ('TITLE', u'IRIS'), ('TITLE', u'measurements:'), ('JOURNAL', u'Icarus'), ('VOLUME', u'135'), ('YEAR', u'1998'), ('PAGE', u'501-517')],
[('AUTHOR_FIRST_NAME', u'B.J.'), ('AUTHOR_LAST_NAME', u'Conrath'), ('AUTHOR_FIRST_NAME', u'P.J.'), ('AUTHOR_LAST_NAME', u'Gierasch'), ('TITLE', u'Global'), ('TITLE', u'variation'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'para'), ('TITLE', u'hydrogen'), ('TITLE', u'fraction'), ('TITLE', u'in'), ('TITLE', u"Jupiter's"), ('TITLE', u'atmosphere'), ('TITLE', u'and'), ('TITLE', u'implications'), ('TITLE', u'for'), ('TITLE', u'dynamics'), ('TITLE', u'on'), ('TITLE', u'the'), ('TITLE', u'outer'), ('TITLE', u'planets:'), ('JOURNAL', u'Icarus'), ('VOLUME', u'57'), ('YEAR', u'1984'), ('PAGE', u'184-204')],
[('AUTHOR_FIRST_NAME', u'I.J.D'), ('AUTHOR_LAST_NAME', u'Craig'), ('AUTHOR_FIRST_NAME', u'J.C.'), ('AUTHOR_LAST_NAME', u'Brown'), ('TITLE', u'Inverse'), ('TITLE', u'Problems'), ('TITLE', u'in'), ('TITLE', u'Astronomy:'), ('TITLE', u'A'), ('TITLE', u'Guide'), ('TITLE', u'to'), ('TITLE', u'Inversion'), ('TITLE', u'Strategies'), ('TITLE', u'for'), ('TITLE', u'Remotely'), ('TITLE', u'Sensed'), ('TITLE', u'Data:'), ('PUBLISHER', u'CRC'), ('PUBLISHER', u'Press'), ('YEAR', u'1986'), ('PAGE', u'160')],
[('AUTHOR_FIRST_NAME', u'A.'), ('AUTHOR_LAST_NAME', u'Farkas'), ('TITLE', u'Orthohydrogen,'), ('TITLE', u'Parahydrogen'), ('TITLE', u'and'), ('TITLE', u'Heavy'), ('TITLE', u'Hydrogen:'), ('PUBLISHER', u'Cambridge'), ('PUBLISHER', u'University'), ('PUBLISHER', u'Press'), ('YEAR', u'1935'), ('PAGE', u'215')],
[('AUTHOR_FIRST_NAME', u'L.'), ('AUTHOR_LAST_NAME', u'Fletcher'), ('TITLE', u'Seasonal'), ('TITLE', u'variability'), ('TITLE', u'of'), ('TITLE', u'Saturns'), ('TITLE', u'tropospheric'), ('TITLE', u'temperatures,'), ('TITLE', u'winds'), ('TITLE', u'and'), ('TITLE', u'para-'), ('TITLE', u'H2'), ('TITLE', u'from'), ('TITLE', u'Cassini'), ('TITLE', u'far-'), ('TITLE', u'IR'), ('TITLE', u'spectroscopy:'), ('JOURNAL', u'Icarus'), ('VOLUME', u'264'), ('YEAR', u'2016'), ('PAGE', u'137-159')],
[('AUTHOR_FIRST_NAME', u'L.'), ('AUTHOR_LAST_NAME', u'Fletcher'), ('AUTHOR_FIRST_NAME', u'I.'), ('AUTHOR_LAST_NAME', u'de Pater'), ('AUTHOR_FIRST_NAME', u'W.T.'), ('AUTHOR_LAST_NAME', u'Reach'), ('TITLE', u"Jupiter's"), ('TITLE', u'para-'), ('TITLE', u'H2'), ('TITLE', u'distribution'), ('TITLE', u'from'), ('TITLE', u'SOFIA/FORCAST'), ('TITLE', u'and'), ('TITLE', u'Voyager/IRIS'), ('TITLE', u'17-'), ('TITLE', u'37m'), ('TITLE', u'spectroscopy:'), ('JOURNAL', u'Icarus'), ('VOLUME', u'286'), ('YEAR', u'2017'), ('PAGE', u'223-240')],
[('AUTHOR_FIRST_NAME', u'L.N.'), ('AUTHOR_LAST_NAME', u'Fletcher'), ('AUTHOR_FIRST_NAME', u'G.S.'), ('AUTHOR_LAST_NAME', u'Orton'), ('AUTHOR_FIRST_NAME', u'P.'), ('AUTHOR_LAST_NAME', u'Yanamandra-Fisher'), ('AUTHOR_FIRST_NAME', u'B.M.'), ('AUTHOR_LAST_NAME', u'Fisher'), ('AUTHOR_FIRST_NAME', u'B.D.'), ('AUTHOR_LAST_NAME', u'Parrish'), ('AUTHOR_FIRST_NAME', u'P.G.J.'), ('AUTHOR_LAST_NAME', u'Irwin'), ('TITLE', u'Retrievals'), ('TITLE', u'of'), ('TITLE', u'atmospheric'), ('TITLE', u'variables'), ('TITLE', u'on'), ('TITLE', u'the'), ('TITLE', u'gas'), ('TITLE', u'giants'), ('TITLE', u'from'), ('TITLE', u'ground-'), ('TITLE', u'based'), ('TITLE', u'mid-'), ('TITLE', u'infrared'), ('TITLE', u'imaging:'), ('JOURNAL', u'Icarus'), ('VOLUME', u'200'), ('YEAR', u'2009'), ('PAGE', u'154-175')],
[('AUTHOR_FIRST_NAME', u'T.L.'), ('AUTHOR_LAST_NAME', u'Herter'), ('AUTHOR_FIRST_NAME', u'J.D.'), ('AUTHOR_LAST_NAME', u'Adams'), ('AUTHOR_FIRST_NAME', u'J.M.'), ('AUTHOR_LAST_NAME', u'DeBuizer'), ('AUTHOR_FIRST_NAME', u'G.E.'), ('AUTHOR_LAST_NAME', u'Gull'), ('AUTHOR_FIRST_NAME', u'J.'), ('AUTHOR_LAST_NAME', u'Shoenwald'), ('AUTHOR_FIRST_NAME', u'C.P.'), ('AUTHOR_LAST_NAME', u'Henderson'), ('AUTHOR_FIRST_NAME', u'L.D.'), ('AUTHOR_LAST_NAME', u'Keller'), ('AUTHOR_FIRST_NAME', u'T.'), ('AUTHOR_LAST_NAME', u'Nikola'), ('AUTHOR_FIRST_NAME', u'G.J.'), ('AUTHOR_LAST_NAME', u'Stacey'), ('AUTHOR_FIRST_NAME', u'W.D.'), ('AUTHOR_LAST_NAME', u'Vacca'), ('TITLE', u'FORCAST:'), ('TITLE', u'a'), ('TITLE', u'first'), ('TITLE', u'light'), ('TITLE', u'facility'), ('TITLE', u'instrument'), ('TITLE', u'for'), ('TITLE', u'SOFIA:'), ('JOURNAL', u'Proc.'), ('JOURNAL', u'SPIE'), ('VOLUME', u'7735'), ('YEAR', u'2010'), ('PAGE', u'eid7735 1U')],
[('AUTHOR_FIRST_NAME', u'S.T.'), ('AUTHOR_LAST_NAME', u'Massie'), ('AUTHOR_FIRST_NAME', u'D.M.'), ('AUTHOR_LAST_NAME', u'Hunten'), ('TITLE', u'Conversion'), ('TITLE', u'of'), ('TITLE', u'para'), ('TITLE', u'and'), ('TITLE', u'ortho'), ('TITLE', u'hydrogen'), ('TITLE', u'in'), ('TITLE', u'the'), ('TITLE', u'Jovian'), ('TITLE', u'planets:'), ('JOURNAL', u'Icarus'), ('VOLUME', u'49'), ('YEAR', u'1982'), ('PAGE', u'213-226')]],
[[('AUTHOR_LAST_NAME', u'Coleman'), ('JOURNAL', u'Progress'), ('JOURNAL', u'in'), ('JOURNAL', u'lipid'), ('JOURNAL', u'research'), ('VOLUME', u'43'), ('ISSUE', u'2'), ('YEAR', u'2004'), ('PAGE', u'134'), ('DOI', u'10.1016/S0163-7827(03)00051-1'), ('ISSN', u'0163-7827'), ('REFSTR', "{u'journal_title': u'Progress in lipid research', u'doi': u'10.1016/S0163-7827(03)00051-1', u'author': u'Coleman', u'issn': u'0163-7827', u'cyear': u'2004', u'volume': u'43', u'@key': u'1_17939177', u'first_page': u'134', u'issue': u'2'}")],
[('JOURNAL', u'American'), ('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Physiology'), ('JOURNAL', u'-'), ('JOURNAL', u'Endocrinology'), ('JOURNAL', u'And'), ('JOURNAL', u'Metabolism'), ('VOLUME', u'296'), ('ISSUE', u'6'), ('YEAR', u'2009'), ('PAGE', u'E1195'), ('DOI', u'10.1152/ajpendo.90958.2008'), ('ISSN', u'0193-1849'), ('REFSTR', "{u'doi': u'10.1152/ajpendo.90958.2008', u'journal_title': u'American Journal of Physiology - Endocrinology And Metabolism', u'issn': u'0193-1849', u'cyear': u'2009', u'volume': u'296', u'@key': u'2_34480394', u'first_page': u'E1195', u'issue': u'6'}")],
[('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Biological'), ('JOURNAL', u'Chemistry'), ('VOLUME', u'284'), ('ISSUE', u'5'), ('YEAR', u'2009'), ('PAGE', u'2593'), ('DOI', u'10.1074/jbc.R800059200'), ('ISSN', u'0021-9258'), ('REFSTR', "{u'doi': u'10.1074/jbc.R800059200', u'journal_title': u'Journal of Biological Chemistry', u'issn': u'0021-9258', u'cyear': u'2009', u'volume': u'284', u'@key': u'3_32003327', u'first_page': u'2593', u'issue': u'5'}")],
[('AUTHOR_LAST_NAME', u'Csaki'), ('JOURNAL', u'Annual'), ('JOURNAL', u'review'), ('JOURNAL', u'of'), ('JOURNAL', u'nutrition'), ('VOLUME', u'30'), ('YEAR', u'2010'), ('PAGE', u'257'), ('DOI', u'10.1146/annurev.nutr.012809.104729'), ('ISSN', u'0199-9885'), ('REFSTR', "{u'journal_title': u'Annual review of nutrition', u'doi': u'10.1146/annurev.nutr.012809.104729', u'author': u'Csaki', u'issn': u'0199-9885', u'cyear': u'2010', u'volume': u'30', u'@key': u'4_37679942', u'first_page': u'257'}")],
[('AUTHOR_LAST_NAME', u'Harris'), ('JOURNAL', u'Trends'), ('JOURNAL', u'in'), ('JOURNAL', u'endocrinology'), ('JOURNAL', u'and'), ('JOURNAL', u'metabolism:'), ('JOURNAL', u'TEM'), ('VOLUME', u'22'), ('ISSUE', u'6'), ('YEAR', u'2011'), ('PAGE', u'226'), ('DOI', u'10.1016/j.tem.2011.02.006'), ('ISSN', u'1043-2760'), ('REFSTR', "{u'journal_title': u'Trends in endocrinology and metabolism: TEM', u'doi': u'10.1016/j.tem.2011.02.006', u'author': u'Harris', u'issn': u'1043-2760', u'cyear': u'2011', u'volume': u'22', u'@key': u'5_39678745', u'first_page': u'226', u'issue': u'6'}")],
[('AUTHOR_LAST_NAME', u'Lusis'), ('JOURNAL', u'Nature'), ('JOURNAL', u'reviews.'), ('JOURNAL', u'Genetics'), ('VOLUME', u'9'), ('ISSUE', u'11'), ('YEAR', u'2008'), ('PAGE', u'819'), ('DOI', u'10.1038/nrg2468'), ('ISSN', u'1471-0056'), ('REFSTR', "{u'journal_title': u'Nature reviews. Genetics', u'doi': u'10.1038/nrg2468', u'author': u'Lusis', u'issn': u'1471-0056', u'cyear': u'2008', u'volume': u'9', u'@key': u'6_32208845', u'first_page': u'819', u'issue': u'11'}")],
[('AUTHOR_LAST_NAME', u'terfy'), ('AUTHOR_FIRST_NAME', u'P'), ('JOURNAL', u'Nature'), ('JOURNAL', u'genetics'), ('VOLUME', u'27'), ('ISSUE', u'1'), ('YEAR', u'2001'), ('PAGE', u'121'), ('DOI', u'10.1038/83685'), ('ISSN', u'1061-4036'), ('REFSTR', "{u'journal_title': u'Nature genetics', u'doi': u'10.1038/83685', u'author': u'P terfy', u'issn': u'1061-4036', u'cyear': u'2001', u'volume': u'27', u'@key': u'7_11010410', u'first_page': u'121', u'issue': u'1'}")],
[('AUTHOR_LAST_NAME', u'Reue'), ('JOURNAL', u'The'), ('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Lipid'), ('JOURNAL', u'Research'), ('VOLUME', u'41'), ('ISSUE', u'7'), ('YEAR', u'2000'), ('PAGE', u'1067'), ('ISSN', u'0022-2275'), ('REFSTR', "{u'journal_title': u'The Journal of Lipid Research', u'author': u'Reue', u'issn': u'0022-2275', u'cyear': u'2000', u'volume': u'41', u'@key': u'8_10380403', u'first_page': u'1067', u'issue': u'7'}")],
[('AUTHOR_LAST_NAME', u'Michot'), ('JOURNAL', u'Human'), ('JOURNAL', u'mutation'), ('VOLUME', u'31'), ('ISSUE', u'7'), ('YEAR', u'2010'), ('PAGE', u'E1564'), ('DOI', u'10.1002/humu.21282'), ('ISSN', u'1059-7794'), ('REFSTR', "{u'journal_title': u'Human mutation', u'doi': u'10.1002/humu.21282', u'author': u'Michot', u'issn': u'1059-7794', u'cyear': u'2010', u'volume': u'31', u'@key': u'9_37588216', u'first_page': u'E1564', u'issue': u'7'}")],
[('AUTHOR_LAST_NAME', u'Zeharia'), ('JOURNAL', u'American'), ('JOURNAL', u'journal'), ('JOURNAL', u'of'), ('JOURNAL', u'human'), ('JOURNAL', u'genetics'), ('VOLUME', u'83'), ('ISSUE', u'4'), ('YEAR', u'2008'), ('PAGE', u'489'), ('DOI', u'10.1016/j.ajhg.2008.09.002'), ('ISSN', u'0002-9297'), ('REFSTR', "{u'journal_title': u'American journal of human genetics', u'doi': u'10.1016/j.ajhg.2008.09.002', u'author': u'Zeharia', u'issn': u'0002-9297', u'cyear': u'2008', u'volume': u'83', u'@key': u'10_32035496', u'first_page': u'489', u'issue': u'4'}")],
[('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Biological'), ('JOURNAL', u'Chemistry'), ('VOLUME', u'282'), ('ISSUE', u'6'), ('YEAR', u'2007'), ('PAGE', u'3450'), ('DOI', u'10.1074/jbc.M610745200'), ('ISSN', u'0021-9258'), ('REFSTR', "{u'doi': u'10.1074/jbc.M610745200', u'journal_title': u'Journal of Biological Chemistry', u'issn': u'0021-9258', u'cyear': u'2007', u'volume': u'282', u'@key': u'11_23087899', u'first_page': u'3450', u'issue': u'6'}")],
[('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Biological'), ('JOURNAL', u'Chemistry'), ('VOLUME', u'282'), ('ISSUE', u'1'), ('YEAR', u'2007'), ('PAGE', u'277'), ('DOI', u'10.1074/jbc.M609537200'), ('ISSN', u'0021-9258'), ('REFSTR', "{u'doi': u'10.1074/jbc.M609537200', u'journal_title': u'Journal of Biological Chemistry', u'issn': u'0021-9258', u'cyear': u'2007', u'volume': u'282', u'@key': u'12_22982038', u'first_page': u'277', u'issue': u'1'}")],
[('AUTHOR_LAST_NAME', u'Nadra'), ('JOURNAL', u'Genes'), ('JOURNAL', u'Development'), ('VOLUME', u'22'), ('ISSUE', u'12'), ('YEAR', u'2008'), ('PAGE', u'1647'), ('DOI', u'10.1101/gad.1638008'), ('ISSN', u'0890-9369'), ('REFSTR', "{u'journal_title': u'Genes Development', u'doi': u'10.1101/gad.1638008', u'author': u'Nadra', u'issn': u'0890-9369', u'cyear': u'2008', u'volume': u'22', u'@key': u'13_31268117', u'first_page': u'1647', u'issue': u'12'}")],
[('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Biological'), ('JOURNAL', u'Chemistry'), ('VOLUME', u'287'), ('ISSUE', u'5'), ('YEAR', u'2012'), ('PAGE', u'3485'), ('DOI', u'10.1074/jbc.M111.296681'), ('ISSN', u'0021-9258'), ('REFSTR', "{u'doi': u'10.1074/jbc.M111.296681', u'journal_title': u'Journal of Biological Chemistry', u'issn': u'0021-9258', u'cyear': u'2012', u'volume': u'287', u'@key': u'14_41382195', u'first_page': u'3485', u'issue': u'5'}")],
[('AUTHOR_LAST_NAME', u'Finck'), ('VOLUME', u'4'), ('ISSUE', u'3'), ('YEAR', u'2006'), ('PAGE', u'199'), ('DOI', u'10.1016/j.cmet.2006.08.005'), ('ISSN', u'1550-4131'), ('REFSTR', "{u'doi': u'10.1016/j.cmet.2006.08.005', u'author': u'Finck', u'issn': u'1550-4131', u'cyear': u'2006', u'volume': u'4', u'@key': u'15_22568095', u'first_page': u'199', u'issue': u'3'}")],
[('JOURNAL', u'Molecular'), ('JOURNAL', u'and'), ('JOURNAL', u'Cellular'), ('JOURNAL', u'Biology'), ('VOLUME', u'30'), ('ISSUE', u'12'), ('YEAR', u'2010'), ('PAGE', u'3126'), ('DOI', u'10.1128/MCB.01671-09'), ('ISSN', u'0270-7306'), ('REFSTR', "{u'doi': u'10.1128/MCB.01671-09', u'journal_title': u'Molecular and Cellular Biology', u'issn': u'0270-7306', u'cyear': u'2010', u'volume': u'30', u'@key': u'16_37038059', u'first_page': u'3126', u'issue': u'12'}")],
[('AUTHOR_LAST_NAME', u'Al-Mosawi'), ('JOURNAL', u'Arthritis'), ('JOURNAL', u'and'), ('JOURNAL', u'rheumatism'), ('VOLUME', u'56'), ('ISSUE', u'3'), ('YEAR', u'2007'), ('PAGE', u'960'), ('DOI', u'10.1002/art.22431'), ('ISSN', u'0004-3591'), ('REFSTR', "{u'journal_title': u'Arthritis and rheumatism', u'doi': u'10.1002/art.22431', u'author': u'Al-Mosawi', u'issn': u'0004-3591', u'cyear': u'2007', u'volume': u'56', u'@key': u'17_23716909', u'first_page': u'960', u'issue': u'3'}")],
[('AUTHOR_LAST_NAME', u'Ferguson'), ('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Medical'), ('JOURNAL', u'Genetics'), ('VOLUME', u'42'), ('ISSUE', u'7'), ('YEAR', u'2005'), ('PAGE', u'551'), ('DOI', u'10.1136/jmg.2005.030759'), ('ISSN', u'0022-2593'), ('REFSTR', "{u'journal_title': u'Journal of Medical Genetics', u'doi': u'10.1136/jmg.2005.030759', u'author': u'Ferguson', u'issn': u'0022-2593', u'cyear': u'2005', u'volume': u'42', u'@key': u'18_19074305', u'first_page': u'551', u'issue': u'7'}")],
[('AUTHOR_LAST_NAME', u'Majeed'), ('JOURNAL', u'European'), ('JOURNAL', u'journal'), ('JOURNAL', u'of'), ('JOURNAL', u'pediatrics'), ('VOLUME', u'160'), ('ISSUE', u'12'), ('YEAR', u'2001'), ('PAGE', u'705'), ('ISSN', u'0340-6199'), ('REFSTR', "{u'journal_title': u'European journal of pediatrics', u'author': u'Majeed', u'issn': u'0340-6199', u'cyear': u'2001', u'volume': u'160', u'@key': u'19_11478957', u'first_page': u'705', u'issue': u'12'}")],
[('AUTHOR_LAST_NAME', u'Majeed'), ('JOURNAL', u'The'), ('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'pediatrics'), ('VOLUME', u'115'), ('ISSUE', u'5 Pt 1'), ('YEAR', u'1989'), ('PAGE', u'730'), ('DOI', u'10.1016/S0022-3476(89)80650-X'), ('ISSN', u'0022-3476'), ('REFSTR', "{u'journal_title': u'The Journal of pediatrics', u'doi': u'10.1016/S0022-3476(89)80650-X', u'author': u'Majeed', u'issn': u'0022-3476', u'cyear': u'1989', u'volume': u'115', u'@key': u'21_5432040', u'first_page': u'730', u'issue': u'5 Pt 1'}")],
[('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Biological'), ('JOURNAL', u'Chemistry'), ('VOLUME', u'284'), ('ISSUE', u'43'), ('YEAR', u'2009'), ('PAGE', u'29968'), ('DOI', u'10.1074/jbc.M109.023663'), ('ISSN', u'0021-9258'), ('REFSTR', "{u'doi': u'10.1074/jbc.M109.023663', u'journal_title': u'Journal of Biological Chemistry', u'issn': u'0021-9258', u'cyear': u'2009', u'volume': u'284', u'@key': u'22_35496169', u'first_page': u'29968', u'issue': u'43'}")],
[('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Biological'), ('JOURNAL', u'Chemistry'), ('VOLUME', u'284'), ('ISSUE', u'11'), ('YEAR', u'2009'), ('PAGE', u'6763'), ('DOI', u'10.1074/jbc.M807882200'), ('ISSN', u'0021-9258'), ('REFSTR', "{u'doi': u'10.1074/jbc.M807882200', u'journal_title': u'Journal of Biological Chemistry', u'issn': u'0021-9258', u'cyear': u'2009', u'volume': u'284', u'@key': u'23_33354200', u'first_page': u'6763', u'issue': u'11'}")],
[('JOURNAL', u'Diabetes'), ('VOLUME', u'60'), ('ISSUE', u'4'), ('YEAR', u'2011'), ('PAGE', u'1072'), ('DOI', u'10.2337/db10-1046'), ('ISSN', u'0012-1797'), ('REFSTR', "{u'doi': u'10.2337/db10-1046', u'journal_title': u'Diabetes', u'issn': u'0012-1797', u'cyear': u'2011', u'volume': u'60', u'@key': u'24_39324740', u'first_page': u'1072', u'issue': u'4'}")],
[('JOURNAL', u'Progress'), ('JOURNAL', u'in'), ('JOURNAL', u'neurobiology'), ('VOLUME', u'63'), ('ISSUE', u'5'), ('YEAR', u'2001'), ('PAGE', u'489'), ('DOI', u'10.1016/S0301-0082(00)00024-1'), ('ISSN', u'0301-0082'), ('REFSTR', "{u'journal_title': u'Progress in neurobiology', u'doi': u'10.1016/S0301-0082(00)00024-1', u'author': u'Gr sser-Cornehls', u'issn': u'0301-0082', u'cyear': u'2001', u'volume': u'63', u'@key': u'25_11043813', u'first_page': u'489', u'issue': u'5'}")],
[('AUTHOR_LAST_NAME', u'Morton'), ('JOURNAL', u'The'), ('JOURNAL', u'Neuroscientist'), ('VOLUME', u'10'), ('ISSUE', u'3'), ('YEAR', u'2004'), ('PAGE', u'247'), ('DOI', u'10.1177/1073858404263517'), ('ISSN', u'1073-8584'), ('REFSTR', "{u'journal_title': u'The Neuroscientist', u'doi': u'10.1177/1073858404263517', u'author': u'Morton', u'issn': u'1073-8584', u'cyear': u'2004', u'volume': u'10', u'@key': u'26_18192684', u'first_page': u'247', u'issue': u'3'}")],
[('AUTHOR_LAST_NAME', u'Giusto'), ('JOURNAL', u'Neurochemical'), ('JOURNAL', u'research'), ('VOLUME', u'27'), ('ISSUE', u'11'), ('YEAR', u'2002'), ('PAGE', u'1513'), ('DOI', u'10.1023/A:1021604623208'), ('ISSN', u'0364-3190'), ('REFSTR', "{u'journal_title': u'Neurochemical research', u'doi': u'10.1023/A:1021604623208', u'author': u'Giusto', u'issn': u'0364-3190', u'cyear': u'2002', u'volume': u'27', u'@key': u'27_17392329', u'first_page': u'1513', u'issue': u'11'}")],
[('AUTHOR_LAST_NAME', u'Pasquar'), ('JOURNAL', u'Experimental'), ('JOURNAL', u'gerontology'), ('VOLUME', u'36'), ('ISSUE', u'8'), ('YEAR', u'2001'), ('PAGE', u'1387'), ('DOI', u'10.1016/S0531-5565(01)00106-1'), ('ISSN', u'0531-5565'), ('REFSTR', "{u'journal_title': u'Experimental gerontology', u'doi': u'10.1016/S0531-5565(01)00106-1', u'author': u'Pasquar', u'issn': u'0531-5565', u'cyear': u'2001', u'volume': u'36', u'@key': u'28_11359831', u'first_page': u'1387', u'issue': u'8'}")],
[('VOLUME', u'39'), ('YEAR', u'2004'), ('PAGE', u'553'), ('ISSN', u'1558-9307'), ('REFSTR', "{u'volume': u'39', u'@key': u'29_43576545', u'first_page': u'553', u'issn': u'1558-9307', u'cyear': u'2004'}")],
[('JOURNAL', u'The'), ('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Lipid'), ('JOURNAL', u'Research'), ('VOLUME', u'53'), ('ISSUE', u'1'), ('YEAR', u'2012'), ('PAGE', u'105'), ('DOI', u'10.1194/jlr.M019430'), ('ISSN', u'0022-2275'), ('REFSTR', "{u'doi': u'10.1194/jlr.M019430', u'journal_title': u'The Journal of Lipid Research', u'issn': u'0022-2275', u'cyear': u'2012', u'volume': u'53', u'@key': u'30_41159612', u'first_page': u'105', u'issue': u'1'}")],
[('JOURNAL', u'The'), ('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Lipid'), ('JOURNAL', u'Research'), ('VOLUME', u'49'), ('ISSUE', u'12'), ('YEAR', u'2008'), ('PAGE', u'2493'), ('DOI', u'10.1194/jlr.R800019-JLR200'), ('ISSN', u'0022-2275'), ('REFSTR', "{u'doi': u'10.1194/jlr.R800019-JLR200', u'journal_title': u'The Journal of Lipid Research', u'issn': u'0022-2275', u'cyear': u'2008', u'volume': u'49', u'@key': u'31_31907829', u'first_page': u'2493', u'issue': u'12'}")],
[('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Biological'), ('JOURNAL', u'Chemistry'), ('VOLUME', u'283'), ('ISSUE', u'43'), ('YEAR', u'2008'), ('PAGE', u'29166'), ('DOI', u'10.1074/jbc.M804278200'), ('ISSN', u'0021-9258'), ('REFSTR', "{u'doi': u'10.1074/jbc.M804278200', u'journal_title': u'Journal of Biological Chemistry', u'issn': u'0021-9258', u'cyear': u'2008', u'volume': u'283', u'@key': u'32_31689109', u'first_page': u'29166', u'issue': u'43'}")],
[('AUTHOR_LAST_NAME', u'Liu'), ('JOURNAL', u'The'), ('JOURNAL', u'Biochemical'), ('JOURNAL', u'journal'), ('VOLUME', u'432'), ('ISSUE', u'1'), ('YEAR', u'2010'), ('PAGE', u'65'), ('DOI', u'10.1042/BJ20100584'), ('ISSN', u'0264-6021'), ('REFSTR', "{u'journal_title': u'The Biochemical journal', u'doi': u'10.1042/BJ20100584', u'author': u'Liu', u'issn': u'0264-6021', u'cyear': u'2010', u'volume': u'432', u'@key': u'33_38002008', u'first_page': u'65', u'issue': u'1'}")],
[('AUTHOR_LAST_NAME', u'Stapleton'), ('VOLUME', u'6'), ('ISSUE', u'4'), ('YEAR', u'2011'), ('PAGE', u'e18932'), ('DOI', u'10.1371/journal.pone.0018932'), ('ISSN', u'1932-6203'), ('REFSTR', "{u'doi': u'10.1371/journal.pone.0018932', u'author': u'Stapleton', u'issn': u'1932-6203', u'cyear': u'2011', u'volume': u'6', u'@key': u'34_39827515', u'first_page': u'e18932', u'issue': u'4'}")],
[('JOURNAL', u'PNAS'), ('VOLUME', u'109'), ('ISSUE', u'5'), ('YEAR', u'2012'), ('PAGE', u'1667'), ('DOI', u'10.1073/pnas.1110730109'), ('ISSN', u'0027-8424'), ('REFSTR', "{u'doi': u'10.1073/pnas.1110730109', u'journal_title': u'PNAS', u'issn': u'0027-8424', u'cyear': u'2012', u'volume': u'109', u'@key': u'35_41527788', u'first_page': u'1667', u'issue': u'5'}")],
[('AUTHOR_LAST_NAME', u'Pyne'), ('JOURNAL', u'Advances'), ('JOURNAL', u'in'), ('JOURNAL', u'enzyme'), ('JOURNAL', u'regulation'), ('VOLUME', u'49'), ('ISSUE', u'1'), ('YEAR', u'2009'), ('PAGE', u'214'), ('DOI', u'10.1016/j.advenzreg.2009.01.011'), ('ISSN', u'0065-2571'), ('REFSTR', "{u'journal_title': u'Advances in enzyme regulation', u'doi': u'10.1016/j.advenzreg.2009.01.011', u'author': u'Pyne', u'issn': u'0065-2571', u'cyear': u'2009', u'volume': u'49', u'@key': u'36_35090688', u'first_page': u'214', u'issue': u'1'}")],
[('AUTHOR_LAST_NAME', u'Brindley'), ('JOURNAL', u'Biochimica'), ('JOURNAL', u'et'), ('JOURNAL', u'Biophysica'), ('JOURNAL', u'Acta.'), ('JOURNAL', u'Protein'), ('JOURNAL', u'Structure'), ('JOURNAL', u'and'), ('JOURNAL', u'Molecular'), ('JOURNAL', u'Enzymology'), ('VOLUME', u'1791'), ('ISSUE', u'9'), ('YEAR', u'2009'), ('PAGE', u'956'), ('DOI', u'10.1016/j.bbalip.2009.02.007'), ('ISSN', u'0006-3002'), ('REFSTR', "{u'journal_title': u'Biochimica et Biophysica Acta. Protein Structure and Molecular Enzymology', u'doi': u'10.1016/j.bbalip.2009.02.007', u'author': u'Brindley', u'issn': u'0006-3002', u'cyear': u'2009', u'volume': u'1791', u'@key': u'37_34190083', u'first_page': u'956', u'issue': u'9'}")],
[('AUTHOR_LAST_NAME', u'Brusse'), ('JOURNAL', u'Clinical'), ('JOURNAL', u'genetics'), ('VOLUME', u'71'), ('ISSUE', u'1'), ('YEAR', u'2007'), ('PAGE', u'12'), ('ISSN', u'0009-9163'), ('REFSTR', "{u'journal_title': u'Clinical genetics', u'author': u'Brusse', u'issn': u'0009-9163', u'cyear': u'2007', u'volume': u'71', u'@key': u'38_23502795', u'first_page': u'12', u'issue': u'1'}")],
[('AUTHOR_LAST_NAME', u'Friedel'), ('JOURNAL', u'Methods'), ('JOURNAL', u'in'), ('JOURNAL', u'enzymology'), ('VOLUME', u'477'), ('YEAR', u'2010'), ('PAGE', u'243'), ('DOI', u'10.1016/S0076-6879(10)77013-0'), ('ISSN', u'0076-6879'), ('REFSTR', "{u'journal_title': u'Methods in enzymology', u'doi': u'10.1016/S0076-6879(10)77013-0', u'author': u'Friedel', u'issn': u'0076-6879', u'cyear': u'2010', u'volume': u'477', u'@key': u'39_37904446', u'first_page': u'243'}")],
[('JOURNAL', u'PNAS'), ('VOLUME', u'102'), ('ISSUE', u'37'), ('YEAR', u'2005'), ('PAGE', u'13188'), ('DOI', u'10.1073/pnas.0505474102'), ('ISSN', u'0027-8424'), ('REFSTR', "{u'doi': u'10.1073/pnas.0505474102', u'journal_title': u'PNAS', u'issn': u'0027-8424', u'cyear': u'2005', u'volume': u'102', u'@key': u'40_19690687', u'first_page': u'13188', u'issue': u'37'}")],
[('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Biological'), ('JOURNAL', u'Chemistry'), ('VOLUME', u'279'), ('ISSUE', u'28'), ('YEAR', u'2004'), ('PAGE', u'29558'), ('DOI', u'10.1074/jbc.M403506200'), ('ISSN', u'0021-9258'), ('REFSTR', "{u'doi': u'10.1074/jbc.M403506200', u'journal_title': u'Journal of Biological Chemistry', u'issn': u'0021-9258', u'cyear': u'2004', u'volume': u'279', u'@key': u'41_19568952', u'first_page': u'29558', u'issue': u'28'}")],
[('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Clinical'), ('JOURNAL', u'Endocrinology'), ('JOURNAL', u'Metabolism'), ('VOLUME', u'93'), ('ISSUE', u'1'), ('YEAR', u'2008'), ('PAGE', u'233'), ('DOI', u'10.1210/jc.2007-1535'), ('ISSN', u'0021-972X'), ('REFSTR', "{u'doi': u'10.1210/jc.2007-1535', u'journal_title': u'Journal of Clinical Endocrinology Metabolism', u'issn': u'0021-972X', u'cyear': u'2008', u'volume': u'93', u'@key': u'42_29585195', u'first_page': u'233', u'issue': u'1'}")],
[('JOURNAL', u'The'), ('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Lipid'), ('JOURNAL', u'Research'), ('VOLUME', u'49'), ('ISSUE', u'7'), ('YEAR', u'2008'), ('PAGE', u'1519'), ('DOI', u'10.1194/jlr.M800061-JLR200'), ('ISSN', u'0022-2275'), ('REFSTR', "{u'doi': u'10.1194/jlr.M800061-JLR200', u'journal_title': u'The Journal of Lipid Research', u'issn': u'0022-2275', u'cyear': u'2008', u'volume': u'49', u'@key': u'43_30697979', u'first_page': u'1519', u'issue': u'7'}")],
[('JOURNAL', u'The'), ('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Lipid'), ('JOURNAL', u'Research'), ('VOLUME', u'50'), ('ISSUE', u'1'), ('YEAR', u'2009'), ('PAGE', u'47'), ('DOI', u'10.1194/jlr.M800204-JLR200'), ('ISSN', u'0022-2275'), ('REFSTR', "{u'doi': u'10.1194/jlr.M800204-JLR200', u'journal_title': u'The Journal of Lipid Research', u'issn': u'0022-2275', u'cyear': u'2009', u'volume': u'50', u'@key': u'44_31842846', u'first_page': u'47', u'issue': u'1'}")],
[('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Biological'), ('JOURNAL', u'Chemistry'), ('VOLUME', u'286'), ('ISSUE', u'1'), ('YEAR', u'2011'), ('PAGE', u'380'), ('DOI', u'10.1074/jbc.M110.184754'), ('ISSN', u'0021-9258'), ('REFSTR', "{u'doi': u'10.1074/jbc.M110.184754', u'journal_title': u'Journal of Biological Chemistry', u'issn': u'0021-9258', u'cyear': u'2011', u'volume': u'286', u'@key': u'45_38471137', u'first_page': u'380', u'issue': u'1'}")],
[('JOURNAL', u'Arteriosclerosis,'), ('JOURNAL', u'Thrombosis,'), ('JOURNAL', u'and'), ('JOURNAL', u'Vascular'), ('JOURNAL', u'Biology'), ('VOLUME', u'31'), ('ISSUE', u'1'), ('YEAR', u'2011'), ('PAGE', u'58'), ('DOI', u'10.1161/ATVBAHA.110.210906'), ('ISSN', u'0276-5047'), ('REFSTR', "{u'doi': u'10.1161/ATVBAHA.110.210906', u'journal_title': u'Arteriosclerosis, Thrombosis, and Vascular Biology', u'issn': u'0276-5047', u'cyear': u'2011', u'volume': u'31', u'@key': u'46_38308457', u'first_page': u'58', u'issue': u'1'}")],
[('JOURNAL', u'The'), ('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Lipid'), ('JOURNAL', u'Research'), ('VOLUME', u'47'), ('ISSUE', u'4'), ('YEAR', u'2006'), ('PAGE', u'745'), ('DOI', u'10.1194/jlr.M500553-JLR200'), ('ISSN', u'0022-2275'), ('REFSTR', "{u'doi': u'10.1194/jlr.M500553-JLR200', u'journal_title': u'The Journal of Lipid Research', u'issn': u'0022-2275', u'cyear': u'2006', u'volume': u'47', u'@key': u'47_21476290', u'first_page': u'745', u'issue': u'4'}")],
[('AUTHOR_LAST_NAME', u'Hildebrand'), ('JOURNAL', u'Computer'), ('JOURNAL', u'methods'), ('JOURNAL', u'in'), ('JOURNAL', u'biomechanics'), ('JOURNAL', u'and'), ('JOURNAL', u'biomedical'), ('JOURNAL', u'engineering'), ('VOLUME', u'1'), ('ISSUE', u'1'), ('YEAR', u'1997'), ('PAGE', u'15'), ('ISSN', u'1025-5842'), ('REFSTR', "{u'journal_title': u'Computer methods in biomechanics and biomedical engineering', u'author': u'Hildebrand', u'issn': u'1025-5842', u'cyear': u'1997', u'volume': u'1', u'@key': u'48_19205922', u'first_page': u'15', u'issue': u'1'}")],
[('AUTHOR_LAST_NAME', u'Rogers'), ('JOURNAL', u'Mammalian'), ('JOURNAL', u'genome'), ('JOURNAL', u':'), ('JOURNAL', u'official'), ('JOURNAL', u'journal'), ('JOURNAL', u'of'), ('JOURNAL', u'the'), ('JOURNAL', u'International'), ('JOURNAL', u'Mammalian'), ('JOURNAL', u'Genome'), ('JOURNAL', u'Society'), ('VOLUME', u'8'), ('ISSUE', u'10'), ('YEAR', u'1997'), ('PAGE', u'711'), ('DOI', u'10.1007/s003359900551'), ('ISSN', u'0938-8990'), ('REFSTR', "{u'journal_title': u'Mammalian genome : official journal of the International Mammalian Genome Society', u'doi': u'10.1007/s003359900551', u'author': u'Rogers', u'issn': u'0938-8990', u'cyear': u'1997', u'volume': u'8', u'@key': u'49_5758712', u'first_page': u'711', u'issue': u'10'}")],
[('AUTHOR_LAST_NAME', u'Hockly'), ('JOURNAL', u'Annals'), ('JOURNAL', u'of'), ('JOURNAL', u'neurology'), ('VOLUME', u'51'), ('ISSUE', u'2'), ('YEAR', u'2002'), ('PAGE', u'235'), ('DOI', u'10.1002/ana.10094'), ('ISSN', u'0364-5134'), ('REFSTR', "{u'journal_title': u'Annals of neurology', u'doi': u'10.1002/ana.10094', u'author': u'Hockly', u'issn': u'0364-5134', u'cyear': u'2002', u'volume': u'51', u'@key': u'50_16905872', u'first_page': u'235', u'issue': u'2'}")],
[('JOURNAL', u'CAN'), ('JOURNAL', u'J'), ('JOURNAL', u'BIOCHEM'), ('JOURNAL', u'PHYSIOL'), ('VOLUME', u'37'), ('YEAR', u'1959'), ('PAGE', u'911'), ('REFSTR', "{u'volume': u'37', u'@key': u'51_28010790', u'first_page': u'911', u'cyear': u'1959', u'journal_title': u'CAN J BIOCHEM PHYSIOL'}")],
[('VOLUME', u'67'), ('YEAR', u'2006'), ('PAGE', u'1907'), ('ISSN', u'1873-3700'), ('REFSTR', "{u'volume': u'67', u'@key': u'52_35218979', u'first_page': u'1907', u'issn': u'1873-3700', u'cyear': u'2006'}")],
[('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Biological'), ('JOURNAL', u'Chemistry'), ('VOLUME', u'277'), ('ISSUE', u'35'), ('YEAR', u'2002'), ('PAGE', u'31994'), ('DOI', u'10.1074/jbc.M205375200'), ('ISSN', u'0021-9258'), ('REFSTR', "{u'doi': u'10.1074/jbc.M205375200', u'journal_title': u'Journal of Biological Chemistry', u'issn': u'0021-9258', u'cyear': u'2002', u'volume': u'277', u'@key': u'53_19556404', u'first_page': u'31994', u'issue': u'35'}")],
[('JOURNAL', u'American'), ('JOURNAL', u'Journal'), ('JOURNAL', u'of'), ('JOURNAL', u'Physiology'), ('JOURNAL', u'-'), ('JOURNAL', u'Endocrinology'), ('JOURNAL', u'And'), ('JOURNAL', u'Metabolism'), ('VOLUME', u'296'), ('ISSUE', u'6'), ('YEAR', u'2009'), ('PAGE', u'E1195'), ('ISSN', u'0193-1849'), ('REFSTR', "{u'journal_title': u'American Journal of Physiology - Endocrinology And Metabolism', u'issn': u'0193-1849', u'cyear': u'2009', u'volume': u'296', u'@key': u'54_34480394', u'first_page': u'E1195', u'issue': u'6'}")],
[('JOURNAL', u'Annual'), ('JOURNAL', u'review'), ('JOURNAL', u'of'), ('JOURNAL', u'nutrition'), ('VOLUME', u'30'), ('YEAR', u'2010'), ('PAGE', u'257'), ('ISSN', u'0199-9885'), ('REFSTR', "{u'journal_title': u'Annual review of nutrition', u'issn': u'0199-9885', u'cyear': u'2010', u'volume': u'30', u'@key': u'55_37679942', u'first_page': u'257'}")],
[('JOURNAL', u'Genes'), ('JOURNAL', u'Development'), ('VOLUME', u'22'), ('ISSUE', u'12'), ('YEAR', u'2008'), ('PAGE', u'1647'), ('ISSN', u'0890-9369'), ('REFSTR', "{u'journal_title': u'Genes Development', u'issn': u'0890-9369', u'cyear': u'2008', u'volume': u'22', u'@key': u'57_31268117', u'first_page': u'1647', u'issue': u'12'}")],
[('JOURNAL', u'Clinical'), ('JOURNAL', u'genetics'), ('VOLUME', u'71'), ('ISSUE', u'1'), ('YEAR', u'2007'), ('PAGE', u'12'), ('ISSN', u'0009-9163'), ('REFSTR', "{u'journal_title': u'Clinical genetics', u'issn': u'0009-9163', u'cyear': u'2007', u'volume': u'71', u'@key': u'58_23502795', u'first_page': u'12', u'issue': u'1'}")]]]
# Labeled training data for a reference/citation field tagger.
# `train_2` is a list of two example groups; each group is a list of
# references, and each reference is a list of (TAG, token) tuples.
# Tags seen here: AUTHOR_FIRST_NAME / AUTHOR_MIDDLE_NAME / AUTHOR_LAST_NAME /
# AUTHOR_COLLABORATION, TITLE, JOURNAL, PUBLISHER, VOLUME, ISSUE, YEAR, PAGE,
# DOI, plus REFPLAINTEXT/REFSTR carrying the raw reference text.
# NOTE(review): tokens are reproduced verbatim from the extraction pipeline —
# apparent typos/mojibake (e.g. u'Spurn', u'Martnez', u'Yrjla', u'dtoiles')
# are intentional parts of the data; do not "fix" them.
train_2 = [[[('AUTHOR_FIRST_NAME', u'A.'), ('AUTHOR_LAST_NAME', u'Abedin'), ('AUTHOR_FIRST_NAME', u'P.'), ('AUTHOR_LAST_NAME', u'Spurn'), ('AUTHOR_FIRST_NAME', u'P.'), ('AUTHOR_LAST_NAME', u'Wiegert'), ('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Pokorn'), ('AUTHOR_FIRST_NAME', u'J.'), ('AUTHOR_LAST_NAME', u'Borovicka'), ('AUTHOR_FIRST_NAME', u'P.'), ('AUTHOR_LAST_NAME', u'Brown'), ('JOURNAL', u'Icarus'), ('VOLUME', u'261'), ('YEAR', u'2015'), ('PAGE', u'100-117')],
[('AUTHOR_FIRST_NAME', u'S.H.'), ('AUTHOR_LAST_NAME', u'Ahn'), ('JOURNAL', u'MNRAS'), ('VOLUME', u'343'), ('YEAR', u'2003'), ('PAGE', u'1095-1100')],
[('AUTHOR_FIRST_NAME', u'S.H.'), ('AUTHOR_LAST_NAME', u'Ahn'), ('JOURNAL', u'Earth'), ('JOURNAL', u'Moon'), ('JOURNAL', u'Planets'), ('VOLUME', u'95'), ('YEAR', u'2004'), ('PAGE', u'63-68')],
[('AUTHOR_FIRST_NAME', u'S.H.'), ('AUTHOR_LAST_NAME', u'Ahn'), ('JOURNAL', u'MNRAS'), ('VOLUME', u'358'), ('YEAR', u'2005'), ('PAGE', u'1105-1115')],
[('AUTHOR_FIRST_NAME', u'T.R'), ('AUTHOR_LAST_NAME', u'Arter'), ('AUTHOR_FIRST_NAME', u'I.P.'), ('AUTHOR_LAST_NAME', u'Williams'), ('JOURNAL', u'MNRAS'), ('VOLUME', u'286'), ('YEAR', u'1997'), ('PAGE', u'163-172')],
[('AUTHOR_FIRST_NAME', u'T.R'), ('AUTHOR_LAST_NAME', u'Arter'), ('AUTHOR_FIRST_NAME', u'I.P.'), ('AUTHOR_LAST_NAME', u'Williams'), ('JOURNAL', u'MNRAS'), ('VOLUME', u'288'), ('YEAR', u'1997'), ('PAGE', u'721-728')],
[('AUTHOR_COLLABORATION', u'Beijing Observatory'), ('TITLE', u'General'), ('TITLE', u'Compilation'), ('TITLE', u'of'), ('TITLE', u'Chinese'), ('TITLE', u'ancient'), ('TITLE', u'Astronomical'), ('TITLE', u'Records:'), ('PUBLISHER', u'Beijing'), ('PUBLISHER', u'Observatory'), ('YEAR', u'1988')],
[('AUTHOR_FIRST_NAME', u'P.'), ('AUTHOR_LAST_NAME', u'Brown'), ('AUTHOR_FIRST_NAME', u'D.K.'), ('AUTHOR_LAST_NAME', u'Wong'), ('AUTHOR_FIRST_NAME', u'R.J.'), ('AUTHOR_LAST_NAME', u'Weryk'), ('AUTHOR_FIRST_NAME', u'P.'), ('AUTHOR_LAST_NAME', u'Wiegert'), ('JOURNAL', u'Icarus'), ('VOLUME', u'207'), ('ISSUE', u'1'), ('YEAR', u'2010'), ('PAGE', u'66-81')],
[('AUTHOR_FIRST_NAME', u'M.'), ('AUTHOR_LAST_NAME', u'Chasles'), ('TITLE', u'Catalogue'), ('TITLE', u"d'aparitions"), ('TITLE', u'dtoiles'), ('TITLE', u'filantes'), ('TITLE', u'pendant'), ('TITLE', u'six'), ('TITLE', u'sicles;'), ('TITLE', u'de'), ('TITLE', u'538'), ('TITLE', u'a'), ('TITLE', u'1123:'), ('JOURNAL', u'Comptes'), ('JOURNAL', u'rendus'), ('JOURNAL', u'de'), ('JOURNAL', u"l'academie"), ('JOURNAL', u'des'), ('JOURNAL', u'Sci.'), ('VOLUME', u'12'), ('YEAR', u'1841'), ('PAGE', u'499-509')],
[('AUTHOR_FIRST_NAME', u'D.'), ('AUTHOR_LAST_NAME', u'Cook'), ('JOURNAL', u'JHA'), ('VOLUME', u'xxx'), ('YEAR', u'1999'), ('PAGE', u'131-160')],
[('AUTHOR_FIRST_NAME', u'U.'), ('AUTHOR_LAST_NAME', u"Dall'Olmo"), ('JOURNAL', u'JHA'), ('VOLUME', u'ix'), ('YEAR', u'1978'), ('PAGE', u'123-134')],
[('AUTHOR_FIRST_NAME', u'U.'), ('AUTHOR_LAST_NAME', u"Dall'Olmo"), ('JOURNAL', u'JHA'), ('VOLUME', u'xi'), ('YEAR', u'1980'), ('PAGE', u'10-27')],
[('AUTHOR_FIRST_NAME', u'W.J.'), ('AUTHOR_LAST_NAME', u'Fisher'), ('JOURNAL', u'Bull.'), ('JOURNAL', u'Harvard'), ('JOURNAL', u'Coll.'), ('JOURNAL', u'Obs'), ('VOLUME', u'894'), ('YEAR', u'1934'), ('PAGE', u'15')],
[('AUTHOR_FIRST_NAME', u'K.'), ('AUTHOR_LAST_NAME', u'Fox'), ('TITLE', u'Asteroids,'), ('TITLE', u'Comets'), ('TITLE', u'and'), ('TITLE', u'Meteors:'), ('PAGE', u'521-525')],
[('AUTHOR_FIRST_NAME', u'K.'), ('AUTHOR_LAST_NAME', u'Fox'), ('AUTHOR_FIRST_NAME', u'I.P.'), ('AUTHOR_LAST_NAME', u'Williams'), ('AUTHOR_FIRST_NAME', u'D.W.'), ('AUTHOR_LAST_NAME', u'Hughes'), ('JOURNAL', u'MNRAS'), ('VOLUME', u'199'), ('YEAR', u'1982'), ('PAGE', u'313-324')],
[('AUTHOR_FIRST_NAME', u'K.'), ('AUTHOR_LAST_NAME', u'Fox'), ('AUTHOR_FIRST_NAME', u'I.P.'), ('AUTHOR_LAST_NAME', u'Williams'), ('AUTHOR_FIRST_NAME', u'D.W'), ('AUTHOR_LAST_NAME', u'Hughes'), ('JOURNAL', u'MNRAS'), ('VOLUME', u'217'), ('YEAR', u'1985'), ('PAGE', u'407-411')],
[('AUTHOR_FIRST_NAME', u'Y.'), ('AUTHOR_LAST_NAME', u'Fujiwara'), ('AUTHOR_FIRST_NAME', u'I.'), ('AUTHOR_LAST_NAME', u'Hasegawa'), ('PAGE', u'209-214')],
[('AUTHOR_FIRST_NAME', u'I.'), ('AUTHOR_LAST_NAME', u'Hasegawa'), ('JOURNAL', u'Publ.'), ('JOURNAL', u'Astron.'), ('JOURNAL', u'Soc.'), ('JOURNAL', u'Japan'), ('VOLUME', u'31'), ('YEAR', u'1979'), ('PAGE', u'257-270')],
[('AUTHOR_FIRST_NAME', u'I.'), ('AUTHOR_LAST_NAME', u'Hasegawa'), ('JOURNAL', u'Cel.'), ('JOURNAL', u'Mech.'), ('VOLUME', u'54'), ('YEAR', u'1992'), ('PAGE', u'129-142')],
[('AUTHOR_FIRST_NAME', u'I.'), ('AUTHOR_LAST_NAME', u'Hasegawa'), ('TITLE', u'Meteors'), ('TITLE', u'and'), ('TITLE', u'Their'), ('TITLE', u'Parent'), ('TITLE', u'Bodies:'), ('PAGE', u'209-223')],
[('AUTHOR_FIRST_NAME', u'I.'), ('AUTHOR_LAST_NAME', u'Hasegawa.'), ('JOURNAL', u'Q.'), ('JOURNAL', u'J.R.'), ('JOURNAL', u'Astron.'), ('JOURNAL', u'Soc.'), ('VOLUME', u'37'), ('YEAR', u'1996'), ('PAGE', u'75-78')],
[('AUTHOR_FIRST_NAME', u'I.'), ('AUTHOR_LAST_NAME', u'Hasegawa'), ('TITLE', u'In'), ('TITLE', u'Meteoroids'), ('TITLE', u'1998:'), ('PUBLISHER', u'Astron.'), ('PUBLISHER', u'Inst.,'), ('PUBLISHER', u'Slovak'), ('PUBLISHER', u'Acad.'), ('PUBLISHER', u'Sci.'), ('YEAR', u'1999'), ('PAGE', u'177-184')],
[('AUTHOR_FIRST_NAME', u'I.'), ('AUTHOR_LAST_NAME', u'Hasegawa'), ('TITLE', u'Meteoroids'), ('TITLE', u'1998:'), ('PAGE', u'153-156')],
[('AUTHOR_FIRST_NAME', u'H.P.'), ('AUTHOR_LAST_NAME', u'Yoke'), ('JOURNAL', u'Vistas'), ('JOURNAL', u'Astron.'), ('VOLUME', u'5'), ('YEAR', u'1962'), ('PAGE', u'127-225')],
[('AUTHOR_FIRST_NAME', u'D.W.'), ('AUTHOR_LAST_NAME', u'Hughes'), ('AUTHOR_FIRST_NAME', u'B.'), ('AUTHOR_LAST_NAME', u'Emerson'), ('JOURNAL', u'Observatory'), ('VOLUME', u'102'), ('YEAR', u'1982'), ('PAGE', u'39-42')],
[('AUTHOR_FIRST_NAME', u'S.'), ('AUTHOR_LAST_NAME', u'Imoto'), ('AUTHOR_FIRST_NAME', u'I.'), ('AUTHOR_LAST_NAME', u'Hasegawa'), ('JOURNAL', u'Smithsonian'), ('JOURNAL', u'Contrib.'), ('JOURNAL', u'Astrophys.'), ('VOLUME', u'2'), ('YEAR', u'1958'), ('PAGE', u'131-144')],
[('AUTHOR_FIRST_NAME', u'J.'), ('AUTHOR_LAST_NAME', u'Jenniskens'), ('JOURNAL', u'A&A'), ('VOLUME', u'287'), ('YEAR', u'1994'), ('PAGE', u'990-1013')],
[('AUTHOR_FIRST_NAME', u'J.'), ('AUTHOR_LAST_NAME', u'Jenniskens'), ('JOURNAL', u'A&A'), ('VOLUME', u'317'), ('YEAR', u'1997'), ('PAGE', u'953-961')],
[('AUTHOR_FIRST_NAME', u'J.'), ('AUTHOR_LAST_NAME', u'Jenniskens'), ('TITLE', u'Meteor'), ('TITLE', u'Showers'), ('TITLE', u'and'), ('TITLE', u'Their'), ('TITLE', u'Parent'), ('TITLE', u'Comets:'), ('PUBLISHER', u'Cambridge'), ('PUBLISHER', u'University'), ('PUBLISHER', u'Press'), ('YEAR', u'2006')],
[('AUTHOR_FIRST_NAME', u'J.'), ('AUTHOR_LAST_NAME', u'Jenniskens'), ('JOURNAL', u'Icarus'), ('VOLUME', u'266'), ('YEAR', u'2016'), ('PAGE', u'331-354')],
[('AUTHOR_FIRST_NAME', u'P.'), ('AUTHOR_LAST_NAME', u'Jenniskens'), ('AUTHOR_FIRST_NAME', u'H.'), ('AUTHOR_LAST_NAME', u'Betlem'), ('AUTHOR_FIRST_NAME', u'M.'), ('AUTHOR_LAST_NAME', u'de Lignie'), ('AUTHOR_FIRST_NAME', u'M.'), ('AUTHOR_LAST_NAME', u'Langbroek'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'van Vliet'), ('JOURNAL', u'A'), ('JOURNAL', u'A'), ('VOLUME', u'327'), ('YEAR', u'1997'), ('PAGE', u'1242-1252')],
[('AUTHOR_FIRST_NAME', u'T.J.'), ('AUTHOR_LAST_NAME', u'Jopek'), ('AUTHOR_FIRST_NAME', u'Z.'), ('AUTHOR_LAST_NAME', u'Kauchov'), ('JOURNAL', u'Planetary'), ('JOURNAL', u'Space'), ('JOURNAL', u'Sci.'), ('VOLUME', u'143'), ('YEAR', u'2017'), ('PAGE', u'2-6')],
[('AUTHOR_FIRST_NAME', u'T.J.'), ('AUTHOR_LAST_NAME', u'Jopek'), ('AUTHOR_FIRST_NAME', u'I.P.'), ('AUTHOR_LAST_NAME', u'Williams'), ('JOURNAL', u'MNRAS'), ('VOLUME', u'430'), ('YEAR', u'2013'), ('PAGE', u'2377-2389')],
[('AUTHOR_FIRST_NAME', u'M.R.'), ('AUTHOR_LAST_NAME', u'Kidger'), ('JOURNAL', u'Q.'), ('JOURNAL', u'J.R.'), ('JOURNAL', u'Astron.'), ('JOURNAL', u'Soc.'), ('VOLUME', u'34'), ('YEAR', u'1993'), ('PAGE', u'331-334')],
[('AUTHOR_FIRST_NAME', u'G.W.'), ('AUTHOR_LAST_NAME', u'Kronk'), ('TITLE', u'Meteor'), ('TITLE', u'Showers.'), ('TITLE', u'An'), ('TITLE', u'annotated'), ('TITLE', u'Catalog:'), ('PUBLISHER', u'Springer'), ('YEAR', u'2014')],
[('AUTHOR_FIRST_NAME', u'M.J.'), ('AUTHOR_LAST_NAME', u'Martnez'), ('AUTHOR_FIRST_NAME', u'F.J.'), ('AUTHOR_LAST_NAME', u'Marco'), ('JOURNAL', u'J.'), ('JOURNAL', u'History'), ('JOURNAL', u'Astron.'), ('VOLUME', u'48'), ('YEAR', u'2017'), ('PAGE', u'62-120')],
[('AUTHOR_FIRST_NAME', u'H.A.'), ('AUTHOR_LAST_NAME', u'Newton'), ('TITLE', u'The'), ('TITLE', u'original'), ('TITLE', u'accounts'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'displays'), ('TITLE', u'in'), ('TITLE', u'former'), ('TITLE', u'times'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'November'), ('TITLE', u'star-'), ('TITLE', u'shower:'), ('JOURNAL', u'Am.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Sci'), ('JOURNAL', u'Arts.'), ('VOLUME', u'37'), ('YEAR', u'1864'), ('PAGE', u'377-389')],
[('AUTHOR_FIRST_NAME', u'D.'), ('AUTHOR_LAST_NAME', u'Pankenier'), ('AUTHOR_FIRST_NAME', u'Zhentao'), ('AUTHOR_LAST_NAME', u'Xu'), ('AUTHOR_FIRST_NAME', u'Yaotiao'), ('AUTHOR_LAST_NAME', u'Jiang'), ('TITLE', u'Archaeoastronomy'), ('TITLE', u'in'), ('TITLE', u'East'), ('TITLE', u'Asia:'), ('TITLE', u'Historical'), ('TITLE', u'Observational'), ('TITLE', u'Records'), ('TITLE', u'of'), ('TITLE', u'Comets'), ('TITLE', u'and'), ('TITLE', u'Meteor'), ('TITLE', u'Showers'), ('TITLE', u'from'), ('TITLE', u'China:'), ('PUBLISHER', u'Cambria'), ('PUBLISHER', u'Press'), ('YEAR', u'2008')],
[('AUTHOR_COLLABORATION', u'PMH'), ('JOURNAL', u'Portugale'), ('JOURNAL', u'Monumenta'), ('JOURNAL', u'Historica'), ('YEAR', u'1856')],
[('AUTHOR_FIRST_NAME', u'A.'), ('AUTHOR_LAST_NAME', u'Quetelet'), ('TITLE', u'Catalogue'), ('TITLE', u'Nouveau'), ('TITLE', u'des'), ('TITLE', u'principals'), ('TITLE', u'aparitions'), ('TITLE', u'dtoiles'), ('TITLE', u'filantes:'), ('JOURNAL', u'Memoires'), ('JOURNAL', u'de'), ('JOURNAL', u"I'Academie"), ('JOURNAL', u'Royale'), ('JOURNAL', u'des'), ('JOURNAL', u'Sciences'), ('JOURNAL', u'et'), ('JOURNAL', u'Belles-'), ('JOURNAL', u'Lettres'), ('JOURNAL', u'de'), ('JOURNAL', u'Bruxelles'), ('VOLUME', u'15'), ('YEAR', u'1841'), ('PAGE', u'21-60')],
[('AUTHOR_FIRST_NAME', u'W.S.'), ('AUTHOR_LAST_NAME', u'Rada'), ('AUTHOR_FIRST_NAME', u'F.R.'), ('AUTHOR_LAST_NAME', u'Stephenson'), ('JOURNAL', u'Q.'), ('JOURNAL', u'J.R.'), ('JOURNAL', u'Astron.'), ('JOURNAL', u'Soc.'), ('VOLUME', u'33'), ('YEAR', u'1992'), ('PAGE', u'5-16')],
[('AUTHOR_FIRST_NAME', u'J'), ('AUTHOR_LAST_NAME', u'Toth'), ('TITLE', u'Meteoroids'), ('TITLE', u'1998:'), ('PAGE', u'223-226')],
[('AUTHOR_FIRST_NAME', u'P.'), ('AUTHOR_LAST_NAME', u'Wiegert'), ('AUTHOR_FIRST_NAME', u'P.'), ('AUTHOR_LAST_NAME', u'Brown'), ('JOURNAL', u'Earth'), ('JOURNAL', u'Moon'), ('JOURNAL', u'Planets'), ('VOLUME', u'95'), ('YEAR', u'2004'), ('PAGE', u'81-88')],
[('AUTHOR_FIRST_NAME', u'I.P.'), ('AUTHOR_LAST_NAME', u'Williams'), ('JOURNAL', u'Astron.'), ('JOURNAL', u'Geophys.'), ('VOLUME', u'52'), ('ISSUE', u'2'), ('YEAR', u'2011'), ('PAGE', u'20-26')],
[('AUTHOR_FIRST_NAME', u'I.P.'), ('AUTHOR_LAST_NAME', u'Williams'), ('AUTHOR_FIRST_NAME', u'S.'), ('AUTHOR_LAST_NAME', u'Collander-Brown'), ('JOURNAL', u'MNRAS'), ('VOLUME', u'294'), ('YEAR', u'1998'), ('PAGE', u'127-138')],
[('AUTHOR_FIRST_NAME', u'I.P.'), ('AUTHOR_LAST_NAME', u'Williams'), ('AUTHOR_FIRST_NAME', u'G.O.'), ('AUTHOR_LAST_NAME', u'Ryabovs'), ('AUTHOR_FIRST_NAME', u'A.P.'), ('AUTHOR_LAST_NAME', u'Baturin'), ('AUTHOR_FIRST_NAME', u'A.M.'), ('AUTHOR_LAST_NAME', u'Chernitsov'), ('JOURNAL', u'MNRAS'), ('VOLUME', u'355'), ('YEAR', u'2004'), ('PAGE', u'1171-1181')],
[('AUTHOR_FIRST_NAME', u'I.P.'), ('AUTHOR_LAST_NAME', u'Williams'), ('AUTHOR_FIRST_NAME', u'Z.'), ('AUTHOR_LAST_NAME', u'Wu'), ('JOURNAL', u'MNRAS'), ('VOLUME', u'264'), ('YEAR', u'1993'), ('PAGE', u'659-664')],
[('AUTHOR_FIRST_NAME', u'I.P.'), ('AUTHOR_LAST_NAME', u'Williams'), ('AUTHOR_FIRST_NAME', u'C.D.'), ('AUTHOR_LAST_NAME', u'Murray'), ('AUTHOR_FIRST_NAME', u'D.W.'), ('AUTHOR_LAST_NAME', u'Hughes'), ('JOURNAL', u'MNRAS'), ('VOLUME', u'189'), ('YEAR', u'1979'), ('PAGE', u'483-492')],
[('AUTHOR_FIRST_NAME', u'Z.'), ('AUTHOR_LAST_NAME', u'Wu'), ('AUTHOR_FIRST_NAME', u'I.P.'), ('AUTHOR_LAST_NAME', u'Williams'), ('JOURNAL', u'MNRAS'), ('VOLUME', u'280'), ('YEAR', u'1996'), ('PAGE', u'1210-1218')],
[('AUTHOR_FIRST_NAME', u'H.J.'), ('AUTHOR_LAST_NAME', u'Yang'), ('AUTHOR_FIRST_NAME', u'Ch.'), ('AUTHOR_LAST_NAME', u'Park'), ('AUTHOR_FIRST_NAME', u'M.'), ('AUTHOR_LAST_NAME', u'Park'), ('JOURNAL', u'Icarus'), ('VOLUME', u'175'), ('YEAR', u'2005'), ('PAGE', u'215-225')],
[('AUTHOR_FIRST_NAME', u'K'), ('AUTHOR_LAST_NAME', u'Yau'), ('AUTHOR_FIRST_NAME', u'D.'), ('AUTHOR_LAST_NAME', u'Yeomans'), ('AUTHOR_FIRST_NAME', u'P.'), ('AUTHOR_LAST_NAME', u'Weismann'), ('JOURNAL', u'MNRAS'), ('VOLUME', u'266'), ('YEAR', u'1994'), ('PAGE', u'305-316')],
[('AUTHOR_FIRST_NAME', u'D.K.'), ('AUTHOR_LAST_NAME', u'Yeomans'), ('AUTHOR_FIRST_NAME', u'K.K.'), ('AUTHOR_LAST_NAME', u'Yau'), ('AUTHOR_FIRST_NAME', u'P.R.'), ('AUTHOR_LAST_NAME', u'Weismann'), ('JOURNAL', u'Icarus'), ('VOLUME', u'124'), ('YEAR', u'1996'), ('PAGE', u'407-413')],
[('AUTHOR_FIRST_NAME', u'L.'), ('AUTHOR_LAST_NAME', u'Yrjla'), ('AUTHOR_FIRST_NAME', u'J.'), ('AUTHOR_LAST_NAME', u'Jenniskens'), ('JOURNAL', u'A'), ('JOURNAL', u'A'), ('VOLUME', u'330'), ('YEAR', u'1998'), ('PAGE', u'739-752')]],
# Second example group: biomedical references. These carry DOI fields, and the
# REFPLAINTEXT / REFSTR entries hold the placeholder string '?!?!' (the raw
# reference text is unavailable for this group, unlike the train_3 examples
# below which embed the full unstructured reference).
[[('AUTHOR_LAST_NAME', u'Prusiner'), ('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_MIDDLE_NAME', u'B'), ('TITLE', u'Prions'), ('JOURNAL', u'Proc'), ('JOURNAL', u'Natl'), ('JOURNAL', u'Acad'), ('JOURNAL', u'Sci'), ('JOURNAL', u'U'), ('JOURNAL', u'S'), ('JOURNAL', u'A'), ('VOLUME', u'95'), ('YEAR', u'1998'), ('PAGE', u'13363'), ('DOI', u'10.1073/pnas.95.23.13363'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Aguzzi'), ('AUTHOR_FIRST_NAME', u'A'), ('TITLE', u'Molecular'), ('TITLE', u'mechanisms'), ('TITLE', u'of'), ('TITLE', u'prion'), ('TITLE', u'pathogenesis'), ('JOURNAL', u'Annu'), ('JOURNAL', u'Rev'), ('JOURNAL', u'Pathol'), ('VOLUME', u'3'), ('YEAR', u'2008'), ('PAGE', u'11'), ('DOI', u'10.1146/annurev.pathmechdis.3.121806.154326'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Soto'), ('AUTHOR_FIRST_NAME', u'C'), ('TITLE', u'Prion'), ('TITLE', u'hypothesis:'), ('TITLE', u'the'), ('TITLE', u'end'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'controversy?'), ('JOURNAL', u'Trends'), ('JOURNAL', u'Biochem'), ('JOURNAL', u'Sci'), ('VOLUME', u'36'), ('YEAR', u'2011'), ('PAGE', u'151'), ('DOI', u'10.1016/j.tibs.2010.11.001'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Ma'), ('AUTHOR_FIRST_NAME', u'J'), ('TITLE', u'The'), ('TITLE', u'role'), ('TITLE', u'of'), ('TITLE', u'cofactors'), ('TITLE', u'in'), ('TITLE', u'prion'), ('TITLE', u'propagation'), ('TITLE', u'and'), ('TITLE', u'infectivity'), ('JOURNAL', u'PLoS'), ('JOURNAL', u'Pathog'), ('VOLUME', u'8'), ('YEAR', u'2012'), ('PAGE', u'e1002589'), ('DOI', u'10.1371/journal.ppat.1002589'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Aguzzi'), ('AUTHOR_FIRST_NAME', u'A'), ('TITLE', u'The'), ('TITLE', u'prions'), ('TITLE', u'elusive'), ('TITLE', u'reason'), ('TITLE', u'for'), ('TITLE', u'being'), ('JOURNAL', u'Annu'), ('JOURNAL', u'Rev'), ('JOURNAL', u'Neurosci'), ('VOLUME', u'31'), ('YEAR', u'2008'), ('PAGE', u'439'), ('DOI', u'10.1146/annurev.neuro.31.060407.125620'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Roucou'), ('AUTHOR_FIRST_NAME', u'X'), ('TITLE', u'Cellular'), ('TITLE', u'prion'), ('TITLE', u'protein'), ('TITLE', u'neuroprotective'), ('TITLE', u'function:'), ('TITLE', u'implications'), ('TITLE', u'in'), ('TITLE', u'prion'), ('TITLE', u'diseases'), ('JOURNAL', u'J'), ('JOURNAL', u'Mol'), ('JOURNAL', u'Med'), ('JOURNAL', u'(Berl)'), ('VOLUME', u'83'), ('YEAR', u'2005'), ('PAGE', u'3'), ('DOI', u'10.1007/s00109-004-0605-5'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Singh'), ('AUTHOR_FIRST_NAME', u'N'), ('TITLE', u'Redox'), ('TITLE', u'control'), ('TITLE', u'of'), ('TITLE', u'prion'), ('TITLE', u'and'), ('TITLE', u'disease'), ('TITLE', u'pathogenesis'), ('JOURNAL', u'Antioxid'), ('JOURNAL', u'Redox'), ('JOURNAL', u'Signal'), ('VOLUME', u'12'), ('YEAR', u'2010'), ('PAGE', u'1271'), ('DOI', u'10.1089/ars.2009.2628'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Pan'), ('AUTHOR_FIRST_NAME', u'Y'), ('TITLE', u'Cellular'), ('TITLE', u'prion'), ('TITLE', u'protein'), ('TITLE', u'promotes'), ('TITLE', u'invasion'), ('TITLE', u'and'), ('TITLE', u'metastasis'), ('TITLE', u'of'), ('TITLE', u'gastric'), ('TITLE', u'cancer'), ('JOURNAL', u'FASEB'), ('JOURNAL', u'J'), ('VOLUME', u'20'), ('YEAR', u'2006'), ('PAGE', u'1886'), ('DOI', u'10.1096/fj.06-6138fje'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Liang'), ('AUTHOR_FIRST_NAME', u'J'), ('TITLE', u'Cellular'), ('TITLE', u'prion'), ('TITLE', u'protein'), ('TITLE', u'promotes'), ('TITLE', u'proliferation'), ('TITLE', u'and'), ('TITLE', u'G1/S'), ('TITLE', u'transition'), ('TITLE', u'of'), ('TITLE', u'human'), ('TITLE', u'gastric'), ('TITLE', u'cancer'), ('TITLE', u'cells'), ('TITLE', u'SGC7901'), ('TITLE', u'and'), ('TITLE', u'AGS'), ('JOURNAL', u'FASEB'), ('JOURNAL', u'J'), ('VOLUME', u'21'), ('YEAR', u'2007'), ('PAGE', u'2247'), ('DOI', u'10.1096/fj.06-7799com'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Sollazzo'), ('AUTHOR_FIRST_NAME', u'V'), ('TITLE', u'Prion'), ('TITLE', u'proteins'), ('TITLE', u'(PRNP'), ('TITLE', u'and'), ('TITLE', u'PRND)'), ('TITLE', u'are'), ('TITLE', u'over-'), ('TITLE', u'expressed'), ('TITLE', u'in'), ('TITLE', u'osteosarcoma'), ('JOURNAL', u'J'), ('JOURNAL', u'Orthop'), ('JOURNAL', u'Res'), ('VOLUME', u'30'), ('YEAR', u'2012'), ('PAGE', u'1004'), ('DOI', u'10.1002/jor.22034'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Meslin'), ('AUTHOR_FIRST_NAME', u'F'), ('TITLE', u'Efficacy'), ('TITLE', u'of'), ('TITLE', u'adjuvant'), ('TITLE', u'chemotherapy'), ('TITLE', u'according'), ('TITLE', u'to'), ('TITLE', u'Prion'), ('TITLE', u'protein'), ('TITLE', u'expression'), ('TITLE', u'in'), ('TITLE', u'patients'), ('TITLE', u'with'), ('TITLE', u'estrogen'), ('TITLE', u'receptor-'), ('TITLE', u'negative'), ('TITLE', u'breast'), ('TITLE', u'cancer'), ('JOURNAL', u'Ann'), ('JOURNAL', u'Oncol'), ('VOLUME', u'18'), ('YEAR', u'2007'), ('PAGE', u'1793'), ('DOI', u'10.1093/annonc/mdm406'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Li'), ('AUTHOR_FIRST_NAME', u'C'), ('TITLE', u'Pro-'), ('TITLE', u'prion'), ('TITLE', u'binds'), ('TITLE', u'filamin'), ('TITLE', u'A,'), ('TITLE', u'facilitating'), ('TITLE', u'its'), ('TITLE', u'interaction'), ('TITLE', u'with'), ('TITLE', u'integrin'), ('TITLE', u'beta1,'), ('TITLE', u'and'), ('TITLE', u'contributes'), ('TITLE', u'to'), ('TITLE', u'melanomagenesis'), ('JOURNAL', u'J'), ('JOURNAL', u'Biol'), ('JOURNAL', u'Chem'), ('VOLUME', u'285'), ('YEAR', u'2010'), ('PAGE', u'30328'), ('DOI', u'10.1074/jbc.M110.147413'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Li'), ('AUTHOR_FIRST_NAME', u'C'), ('TITLE', u'Binding'), ('TITLE', u'of'), ('TITLE', u'pro-'), ('TITLE', u'prion'), ('TITLE', u'to'), ('TITLE', u'filamin'), ('TITLE', u'A'), ('TITLE', u'disrupts'), ('TITLE', u'cytoskeleton'), ('TITLE', u'and'), ('TITLE', u'correlates'), ('TITLE', u'with'), ('TITLE', u'poor'), ('TITLE', u'prognosis'), ('TITLE', u'in'), ('TITLE', u'pancreatic'), ('TITLE', u'cancer'), ('JOURNAL', u'J'), ('JOURNAL', u'Clin'), ('JOURNAL', u'Invest'), ('VOLUME', u'119'), ('YEAR', u'2009'), ('PAGE', u'2725'), ('DOI', u'10.1172/JCI39542'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Mehrpour'), ('AUTHOR_FIRST_NAME', u'M'), ('TITLE', u'Prion'), ('TITLE', u'protein:'), ('TITLE', u'From'), ('TITLE', u'physiology'), ('TITLE', u'to'), ('TITLE', u'cancer'), ('TITLE', u'biology'), ('JOURNAL', u'Cancer'), ('JOURNAL', u'Lett'), ('VOLUME', u'290'), ('YEAR', u'2010'), ('PAGE', u'1'), ('DOI', u'10.1016/j.canlet.2009.07.009'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Li'), ('AUTHOR_FIRST_NAME', u'Q'), ('AUTHOR_MIDDLE_NAME', u'Q'), ('TITLE', u'The'), ('TITLE', u'role'), ('TITLE', u'of'), ('TITLE', u'P-'), ('TITLE', u'glycoprotein/cellular'), ('TITLE', u'prion'), ('TITLE', u'protein'), ('TITLE', u'interaction'), ('TITLE', u'in'), ('TITLE', u'multidrug-'), ('TITLE', u'resistant'), ('TITLE', u'breast'), ('TITLE', u'cancer'), ('TITLE', u'cells'), ('TITLE', u'treated'), ('TITLE', u'with'), ('TITLE', u'paclitaxel'), ('JOURNAL', u'Cell'), ('JOURNAL', u'Mol'), ('JOURNAL', u'Life'), ('JOURNAL', u'Sci'), ('VOLUME', u'66'), ('YEAR', u'2009'), ('PAGE', u'504'), ('DOI', u'10.1007/s00018-008-8548-6'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Diarra-Mehrpour'), ('AUTHOR_FIRST_NAME', u'M'), ('TITLE', u'Prion'), ('TITLE', u'protein'), ('TITLE', u'prevents'), ('TITLE', u'human'), ('TITLE', u'breast'), ('TITLE', u'carcinoma'), ('TITLE', u'cell'), ('TITLE', u'line'), ('TITLE', u'from'), ('TITLE', u'tumor'), ('TITLE', u'necrosis'), ('TITLE', u'factor'), ('TITLE', u'alpha-'), ('TITLE', u'induced'), ('TITLE', u'cell'), ('TITLE', u'death'), ('JOURNAL', u'Cancer'), ('JOURNAL', u'Res'), ('VOLUME', u'64'), ('YEAR', u'2004'), ('PAGE', u'719'), ('DOI', u'10.1158/0008-5472.CAN-03-1735'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Meslin'), ('AUTHOR_FIRST_NAME', u'F'), ('TITLE', u'Silencing'), ('TITLE', u'of'), ('TITLE', u'prion'), ('TITLE', u'protein'), ('TITLE', u'sensitizes'), ('TITLE', u'breast'), ('TITLE', u'adriamycin-'), ('TITLE', u'resistant'), ('TITLE', u'carcinoma'), ('TITLE', u'cells'), ('TITLE', u'to'), ('TITLE', u'TRAIL-'), ('TITLE', u'mediated'), ('TITLE', u'cell'), ('TITLE', u'death'), ('JOURNAL', u'Cancer'), ('JOURNAL', u'Res'), ('VOLUME', u'67'), ('YEAR', u'2007'), ('PAGE', u'10910'), ('DOI', u'10.1158/0008-5472.CAN-07-0512'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Roucou'), ('AUTHOR_FIRST_NAME', u'X'), ('TITLE', u'Cellular'), ('TITLE', u'prion'), ('TITLE', u'protein'), ('TITLE', u'inhibits'), ('TITLE', u'proapoptotic'), ('TITLE', u'Bax'), ('TITLE', u'conformational'), ('TITLE', u'change'), ('TITLE', u'in'), ('TITLE', u'human'), ('TITLE', u'neurons'), ('TITLE', u'and'), ('TITLE', u'in'), ('TITLE', u'breast'), ('TITLE', u'carcinoma'), ('TITLE', u'MCF-'), ('TITLE', u'7'), ('TITLE', u'cells'), ('JOURNAL', u'Cell'), ('JOURNAL', u'Death'), ('JOURNAL', u'Differ'), ('VOLUME', u'12'), ('YEAR', u'2005'), ('PAGE', u'783'), ('DOI', u'10.1038/sj.cdd.4401629'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Wang'), ('AUTHOR_FIRST_NAME', u'N'), ('TITLE', u'Quinoprotein'), ('TITLE', u'adducts'), ('TITLE', u'accumulate'), ('TITLE', u'in'), ('TITLE', u'the'), ('TITLE', u'substantia'), ('TITLE', u'nigra'), ('TITLE', u'of'), ('TITLE', u'aged'), ('TITLE', u'rats'), ('TITLE', u'and'), ('TITLE', u'correlate'), ('TITLE', u'with'), ('TITLE', u'dopamine-'), ('TITLE', u'induced'), ('TITLE', u'toxicity'), ('TITLE', u'in'), ('TITLE', u'SH-'), ('TITLE', u'SY5Y'), ('TITLE', u'cells'), ('JOURNAL', u'Neurochem'), ('JOURNAL', u'Res'), ('VOLUME', u'36'), ('YEAR', u'2011'), ('PAGE', u'2169'), ('DOI', u'10.1007/s11064-011-0541-z'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Kuwahara'), ('AUTHOR_FIRST_NAME', u'C'), ('TITLE', u'Prions'), ('TITLE', u'prevent'), ('TITLE', u'neuronal'), ('TITLE', u'cell-'), ('TITLE', u'line'), ('TITLE', u'death'), ('JOURNAL', u'Nature'), ('VOLUME', u'400'), ('YEAR', u'1999'), ('PAGE', u'225'), ('DOI', u'10.1038/22241'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Kim'), ('AUTHOR_FIRST_NAME', u'B'), ('AUTHOR_MIDDLE_NAME', u'H'), ('TITLE', u'The'), ('TITLE', u'cellular'), ('TITLE', u'prion'), ('TITLE', u'protein'), ('TITLE', u'(PrPC)'), ('TITLE', u'prevents'), ('TITLE', u'apoptotic'), ('TITLE', u'neuronal'), ('TITLE', u'cell'), ('TITLE', u'death'), ('TITLE', u'and'), ('TITLE', u'mitochondrial'), ('TITLE', u'dysfunction'), ('TITLE', u'induced'), ('TITLE', u'by'), ('TITLE', u'serum'), ('TITLE', u'deprivation'), ('JOURNAL', u'Brain'), ('JOURNAL', u'Res'), ('JOURNAL', u'Mol'), ('JOURNAL', u'Brain'), ('JOURNAL', u'Res'), ('VOLUME', u'124'), ('YEAR', u'2004'), ('PAGE', u'40'), ('DOI', u'10.1016/j.molbrainres.2004.02.005'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Shyu'), ('AUTHOR_FIRST_NAME', u'W'), ('AUTHOR_MIDDLE_NAME', u'C'), ('TITLE', u'Molecular'), ('TITLE', u'modulation'), ('TITLE', u'of'), ('TITLE', u'expression'), ('TITLE', u'of'), ('TITLE', u'prion'), ('TITLE', u'protein'), ('TITLE', u'by'), ('TITLE', u'heat'), ('TITLE', u'shock'), ('JOURNAL', u'Mol'), ('JOURNAL', u'Neurobiol'), ('VOLUME', u'26'), ('YEAR', u'2002'), ('PAGE', u'1'), ('DOI', u'10.1385/MN:26:1:001'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Williams'), ('AUTHOR_FIRST_NAME', u'W'), ('AUTHOR_MIDDLE_NAME', u'M'), ('TITLE', u'Ageing'), ('TITLE', u'and'), ('TITLE', u'exposure'), ('TITLE', u'to'), ('TITLE', u'oxidative'), ('TITLE', u'stress'), ('TITLE', u'in'), ('TITLE', u'vivo'), ('TITLE', u'differentially'), ('TITLE', u'affect'), ('TITLE', u'cellular'), ('TITLE', u'levels'), ('TITLE', u'of'), ('TITLE', u'PrP'), ('TITLE', u'in'), ('TITLE', u'mouse'), ('TITLE', u'cerebral'), ('TITLE', u'microvessels'), ('TITLE', u'and'), ('TITLE', u'brain'), ('TITLE', u'parenchyma'), ('JOURNAL', u'Neuropathol'), ('JOURNAL', u'Appl'), ('JOURNAL', u'Neurobiol'), ('VOLUME', u'30'), ('YEAR', u'2004'), ('PAGE', u'161'), ('DOI', u'10.1111/j.1365-2990.2003.00523.x'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Shyu'), ('AUTHOR_FIRST_NAME', u'W'), ('AUTHOR_MIDDLE_NAME', u'C'), ('TITLE', u'Hypoglycemia'), ('TITLE', u'enhances'), ('TITLE', u'the'), ('TITLE', u'expression'), ('TITLE', u'of'), ('TITLE', u'prion'), ('TITLE', u'protein'), ('TITLE', u'and'), ('TITLE', u'heat-'), ('TITLE', u'shock'), ('TITLE', u'protein'), ('TITLE', u'70'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'mouse'), ('TITLE', u'neuroblastoma'), ('TITLE', u'cell'), ('TITLE', u'line'), ('JOURNAL', u'J'), ('JOURNAL', u'Neurosci'), ('JOURNAL', u'Res'), ('VOLUME', u'80'), ('YEAR', u'2005'), ('PAGE', u'887'), ('DOI', u'10.1002/jnr.20509'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Podar'), ('AUTHOR_FIRST_NAME', u'K'), ('TITLE', u'A'), ('TITLE', u'pivotal'), ('TITLE', u'role'), ('TITLE', u'for'), ('TITLE', u'Mcl-'), ('TITLE', u'1'), ('TITLE', u'in'), ('TITLE', u'Bortezomib-'), ('TITLE', u'induced'), ('TITLE', u'apoptosis'), ('JOURNAL', u'Oncogene'), ('VOLUME', u'27'), ('YEAR', u'2008'), ('PAGE', u'721'), ('DOI', u'10.1038/sj.onc.1210679'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Tampio'), ('AUTHOR_FIRST_NAME', u'M'), ('TITLE', u'Induction'), ('TITLE', u'of'), ('TITLE', u'PUMA-'), ('TITLE', u'alpha'), ('TITLE', u'and'), ('TITLE', u'down-'), ('TITLE', u'regulation'), ('TITLE', u'of'), ('TITLE', u'PUMA-'), ('TITLE', u'beta'), ('TITLE', u'expression'), ('TITLE', u'is'), ('TITLE', u'associated'), ('TITLE', u'with'), ('TITLE', u'benzo(a)pyrene-'), ('TITLE', u'induced'), ('TITLE', u'apoptosis'), ('TITLE', u'in'), ('TITLE', u'MCF-'), ('TITLE', u'7'), ('TITLE', u'cells'), ('JOURNAL', u'Toxicol'), ('JOURNAL', u'Lett'), ('VOLUME', u'188'), ('YEAR', u'2009'), ('PAGE', u'214'), ('DOI', u'10.1016/j.toxlet.2009.04.016'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Sanz'), ('AUTHOR_FIRST_NAME', u'E'), ('TITLE', u'Anti-'), ('TITLE', u'apoptotic'), ('TITLE', u'effect'), ('TITLE', u'of'), ('TITLE', u'Mao-'), ('TITLE', u'B'), ('TITLE', u'inhibitor'), ('TITLE', u'PF9601N'), ('TITLE', u'[N-'), ('TITLE', u'(2-'), ('TITLE', u'propynyl)-'), ('TITLE', u'2-'), ('TITLE', u'(5-'), ('TITLE', u'benzyloxy-'), ('TITLE', u'indolyl)'), ('TITLE', u'methylamine]'), ('TITLE', u'is'), ('TITLE', u'mediated'), ('TITLE', u'by'), ('TITLE', u'p53'), ('TITLE', u'pathway'), ('TITLE', u'inhibition'), ('TITLE', u'in'), ('TITLE', u'MPP+'), ('TITLE', u'-'), ('TITLE', u'treated'), ('TITLE', u'SH-'), ('TITLE', u'SY5Y'), ('TITLE', u'human'), ('TITLE', u'dopaminergic'), ('TITLE', u'cells'), ('JOURNAL', u'J'), ('JOURNAL', u'Neurochem'), ('VOLUME', u'105'), ('YEAR', u'2008'), ('PAGE', u'2404'), ('DOI', u'10.1111/j.1471-4159.2008.05326.x'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Liu'), ('AUTHOR_FIRST_NAME', u'J'), ('TITLE', u'ERKs/p53'), ('TITLE', u'signal'), ('TITLE', u'transduction'), ('TITLE', u'pathway'), ('TITLE', u'is'), ('TITLE', u'involved'), ('TITLE', u'in'), ('TITLE', u'doxorubicin-'), ('TITLE', u'induced'), ('TITLE', u'apoptosis'), ('TITLE', u'in'), ('TITLE', u'H9c2'), ('TITLE', u'cells'), ('TITLE', u'and'), ('TITLE', u'cardiomyocytes'), ('JOURNAL', u'Am'), ('JOURNAL', u'J'), ('JOURNAL', u'Physiol'), ('JOURNAL', u'Heart'), ('JOURNAL', u'Circ'), ('JOURNAL', u'Physiol'), ('VOLUME', u'295'), ('YEAR', u'2008'), ('PAGE', u'H1956'), ('DOI', u'10.1152/ajpheart.00407.2008'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Paitel'), ('AUTHOR_FIRST_NAME', u'E'), ('TITLE', u'Cellular'), ('TITLE', u'prion'), ('TITLE', u'protein'), ('TITLE', u'sensitizes'), ('TITLE', u'neurons'), ('TITLE', u'to'), ('TITLE', u'apoptotic'), ('TITLE', u'stimuli'), ('TITLE', u'through'), ('TITLE', u'Mdm2-'), ('TITLE', u'regulated'), ('TITLE', u'and'), ('TITLE', u'p53-'), ('TITLE', u'dependent'), ('TITLE', u'caspase'), ('TITLE', u'3-'), ('TITLE', u'like'), ('TITLE', u'activation'), ('JOURNAL', u'J'), ('JOURNAL', u'Biol'), ('JOURNAL', u'Chem'), ('VOLUME', u'278'), ('YEAR', u'2003'), ('PAGE', u'10061'), ('DOI', u'10.1074/jbc.M211580200'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Paitel'), ('AUTHOR_FIRST_NAME', u'E'), ('TITLE', u'Primary'), ('TITLE', u'cultured'), ('TITLE', u'neurons'), ('TITLE', u'devoid'), ('TITLE', u'of'), ('TITLE', u'cellular'), ('TITLE', u'prion'), ('TITLE', u'display'), ('TITLE', u'lower'), ('TITLE', u'responsiveness'), ('TITLE', u'to'), ('TITLE', u'staurosporine'), ('TITLE', u'through'), ('TITLE', u'the'), ('TITLE', u'control'), ('TITLE', u'of'), ('TITLE', u'p53'), ('TITLE', u'at'), ('TITLE', u'both'), ('TITLE', u'transcriptional'), ('TITLE', u'and'), ('TITLE', u'post-'), ('TITLE', u'transcriptional'), ('TITLE', u'levels'), ('JOURNAL', u'J'), ('JOURNAL', u'Biol'), ('JOURNAL', u'Chem'), ('VOLUME', u'279'), ('YEAR', u'2004'), ('PAGE', u'612'), ('DOI', u'10.1074/jbc.M310453200'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Lacroix'), ('AUTHOR_FIRST_NAME', u'M'), ('TITLE', u'p53'), ('TITLE', u'and'), ('TITLE', u'breast'), ('TITLE', u'cancer,'), ('TITLE', u'an'), ('TITLE', u'update'), ('JOURNAL', u'Endocr'), ('JOURNAL', u'Relat'), ('JOURNAL', u'Cancer'), ('VOLUME', u'13'), ('YEAR', u'2006'), ('PAGE', u'293'), ('DOI', u'10.1677/erc.1.01172'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Cagnol'), ('AUTHOR_FIRST_NAME', u'S'), ('TITLE', u'ERK'), ('TITLE', u'and'), ('TITLE', u'cell'), ('TITLE', u'death:'), ('TITLE', u'mechanisms'), ('TITLE', u'of'), ('TITLE', u'ERK-'), ('TITLE', u'induced'), ('TITLE', u'cell'), ('TITLE', u'death-'), ('TITLE', u'apoptosis,'), ('TITLE', u'autophagy'), ('TITLE', u'and'), ('TITLE', u'senescence'), ('JOURNAL', u'FEBS'), ('JOURNAL', u'J'), ('VOLUME', u'277'), ('YEAR', u'2010'), ('PAGE', u'2'), ('DOI', u'10.1111/j.1742-4658.2009.07366.x'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Balmanno'), ('AUTHOR_FIRST_NAME', u'K'), ('TITLE', u'Tumour'), ('TITLE', u'cell'), ('TITLE', u'survival'), ('TITLE', u'signalling'), ('TITLE', u'by'), ('TITLE', u'the'), ('TITLE', u'ERK1/2'), ('TITLE', u'pathway'), ('JOURNAL', u'Cell'), ('JOURNAL', u'Death'), ('JOURNAL', u'Differ'), ('VOLUME', u'16'), ('YEAR', u'2009'), ('PAGE', u'368'), ('DOI', u'10.1038/cdd.2008.148'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Thomas'), ('AUTHOR_FIRST_NAME', u'R'), ('TITLE', u'HIF-'), ('TITLE', u'1'), ('TITLE', u'alpha:'), ('TITLE', u'a'), ('TITLE', u'key'), ('TITLE', u'survival'), ('TITLE', u'factor'), ('TITLE', u'for'), ('TITLE', u'serum-'), ('TITLE', u'deprived'), ('TITLE', u'prostate'), ('TITLE', u'cancer'), ('TITLE', u'cells'), ('JOURNAL', u'Prostate'), ('VOLUME', u'68'), ('YEAR', u'2008'), ('PAGE', u'1405'), ('DOI', u'10.1002/pros.20808'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Lee'), ('AUTHOR_FIRST_NAME', u'K'), ('TITLE', u'Anthracycline'), ('TITLE', u'chemotherapy'), ('TITLE', u'inhibits'), ('TITLE', u'HIF-'), ('TITLE', u'1'), ('TITLE', u'transcriptional'), ('TITLE', u'activity'), ('TITLE', u'and'), ('TITLE', u'tumor-'), ('TITLE', u'induced'), ('TITLE', u'mobilization'), ('TITLE', u'of'), ('TITLE', u'circulating'), ('TITLE', u'angiogenic'), ('TITLE', u'cells'), ('JOURNAL', u'Proc'), ('JOURNAL', u'Natl'), ('JOURNAL', u'Acad'), ('JOURNAL', u'Sci'), ('JOURNAL', u'U'), ('JOURNAL', u'S'), ('JOURNAL', u'A'), ('VOLUME', u'106'), ('YEAR', u'2009'), ('PAGE', u'2353'), ('DOI', u'10.1073/pnas.0812801106'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')],
[('AUTHOR_LAST_NAME', u'Anantharam'), ('AUTHOR_FIRST_NAME', u'V'), ('TITLE', u'Opposing'), ('TITLE', u'roles'), ('TITLE', u'of'), ('TITLE', u'prion'), ('TITLE', u'protein'), ('TITLE', u'in'), ('TITLE', u'oxidative'), ('TITLE', u'stress-'), ('TITLE', u'and'), ('TITLE', u'ER'), ('TITLE', u'stress-'), ('TITLE', u'induced'), ('TITLE', u'apoptotic'), ('TITLE', u'signaling'), ('JOURNAL', u'Free'), ('JOURNAL', u'Radic'), ('JOURNAL', u'Biol'), ('JOURNAL', u'Med'), ('VOLUME', u'45'), ('YEAR', u'2008'), ('PAGE', u'1530'), ('DOI', u'10.1016/j.freeradbiomed.2008.08.028'), ('REFPLAINTEXT', '?!?!'), ('REFSTR', '?!?!')]]]
train_3 = [[('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Aursand'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Napoli'), ('AUTHOR_FIRST_NAME', u'J'), ('AUTHOR_LAST_NAME', u'Ridder'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'dynamics'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'weak'), ('TITLE', u'Freedericksz'), ('TITLE', u'transition'), ('TITLE', u'for'), ('TITLE', u'nematic'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('JOURNAL', u'Commun.'), ('JOURNAL', u'Comput.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'20'), ('ISSUE', u'5'), ('YEAR', u'2016'), ('PAGE', u'1359'), ('DOI', u'10.4208/cicp.190615.090516a'), ('REFPLAINTEXT', u'Aursand, P., Napoli, G., Ridder, J.: On the dynamics of the weak Freedericksz transition for nematic liquid crystals. Commun. Comput. Phys. 20(5), 1359\u20131380 (2016)'), ('REFSTR', "{u'bibunstructured': u'Aursand, P., Napoli, G., Ridder, J.: On the dynamics of the weak Freedericksz transition for nematic liquid crystals. Commun. Comput. Phys. 20(5), 1359\\u20131380 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Aursand', u'initials': u'P'}, {u'familyname': u'Napoli', u'initials': u'G'}, {u'familyname': u'Ridder', u'initials': u'J'}], u'issueid': u'5', u'journaltitle': u'Commun. Comput. Phys.', u'volumeid': u'20', u'firstpage': u'1359', u'lastpage': u'1380', u'year': u'2016', u'articletitle': {u'#text': u'On the dynamics of the weak Freedericksz transition for nematic liquid crystals', u'@outputmedium': u'All', u'@language': u'En'}, u'occurrence': [{u'handle': u'3611798', u'@type': u'AMSID'}, {u'handle': u'10.4208/cicp.190615.090516a', u'@type': u'DOI'}]}, u'citationnumber': u'1.', u'@id': u'CR1'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Bevilacqua'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Napoli'), ('TITLE', u'Reexamination'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'HelfrichHurault'), ('TITLE', u'effect'), ('TITLE', u'in'), ('TITLE', u'smectic-'), ('TITLE', u'a'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'E'), ('VOLUME', u'72'), ('ISSUE', u'4'), ('YEAR', u'2005'), ('PAGE', u'041708'), ('REFPLAINTEXT', u'Bevilacqua, G., Napoli, G.: Reexamination of the Helfrich\u2013Hurault effect in smectic-a liquid crystals. Phys. Rev. E 72(4), 041708 (2005)'), ('REFSTR', "{u'bibunstructured': u'Bevilacqua, G., Napoli, G.: Reexamination of the Helfrich\\u2013Hurault effect in smectic-a liquid crystals. Phys. Rev. E 72(4), 041708 (2005)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Bevilacqua', u'initials': u'G'}, {u'familyname': u'Napoli', u'initials': u'G'}], u'issueid': u'4', u'journaltitle': u'Phys. Rev. E', u'volumeid': u'72', u'firstpage': u'041708', u'year': u'2005', u'articletitle': {u'#text': u'Reexamination of the Helfrich\\u2013Hurault effect in smectic-a liquid crystals', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1103/PhysRevE.72.041708', u'@type': u'DOI'}}, u'citationnumber': u'2.', u'@id': u'CR2'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Bevilacqua'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Napoli'), ('TITLE', u'Parity'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'weak'), ('TITLE', u'Fredericksz'), ('TITLE', u'transition'), ('JOURNAL', u'Eur.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'J.'), ('JOURNAL', u'E'), ('VOLUME', u'35'), ('ISSUE', u'12'), ('YEAR', u'2012'), ('PAGE', u'133'), ('REFPLAINTEXT', u'Bevilacqua, G., Napoli, G.: Parity of the weak Fr\xe9edericksz transition. Eur. Phys. J. E 35(12), 133 (2012)'), ('REFSTR', "{u'bibunstructured': u'Bevilacqua, G., Napoli, G.: Parity of the weak Fr\\xe9edericksz transition. Eur. Phys. J. E 35(12), 133 (2012)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Bevilacqua', u'initials': u'G'}, {u'familyname': u'Napoli', u'initials': u'G'}], u'issueid': u'12', u'journaltitle': u'Eur. Phys. J. E', u'volumeid': u'35', u'firstpage': u'133', u'year': u'2012', u'articletitle': {u'#text': u'Parity of the weak Fr\\xe9edericksz transition', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1140/epje/i2012-12133-7', u'@type': u'DOI'}}, u'citationnumber': u'3.', u'@id': u'CR3'}")],
[('AUTHOR_FIRST_NAME', u'NA'), ('AUTHOR_LAST_NAME', u'Clark'), ('AUTHOR_FIRST_NAME', u'RB'), ('AUTHOR_LAST_NAME', u'Meyer'), ('TITLE', u'Strain-'), ('TITLE', u'induced'), ('TITLE', u'instability'), ('TITLE', u'of'), ('TITLE', u'monodomain'), ('TITLE', u'smectic'), ('TITLE', u'a'), ('TITLE', u'and'), ('TITLE', u'cholesteric'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Lett.'), ('VOLUME', u'22'), ('ISSUE', u'10'), ('YEAR', u'1973'), ('PAGE', u'493'), ('REFPLAINTEXT', u'Clark, N.A., Meyer, R.B.: Strain-induced instability of monodomain smectic a and cholesteric liquid crystals. Appl. Phys. Lett. 22(10), 493\u2013494 (1973)'), ('REFSTR', "{u'bibunstructured': u'Clark, N.A., Meyer, R.B.: Strain-induced instability of monodomain smectic a and cholesteric liquid crystals. Appl. Phys. Lett. 22(10), 493\\u2013494 (1973)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Clark', u'initials': u'NA'}, {u'familyname': u'Meyer', u'initials': u'RB'}], u'issueid': u'10', u'journaltitle': u'Appl. Phys. Lett.', u'volumeid': u'22', u'firstpage': u'493', u'lastpage': u'494', u'year': u'1973', u'articletitle': {u'#text': u'Strain-induced instability of monodomain smectic a and cholesteric liquid crystals', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1063/1.1654481', u'@type': u'DOI'}}, u'citationnumber': u'4.', u'@id': u'CR4'}")],
[('AUTHOR_FIRST_NAME', u'R'), ('AUTHOR_LAST_NAME', u'Vita'), ('AUTHOR_FIRST_NAME', u'IW'), ('AUTHOR_LAST_NAME', u'Stewart'), ('TITLE', u'Influence'), ('TITLE', u'of'), ('TITLE', u'weak'), ('TITLE', u'anchoring'), ('TITLE', u'upon'), ('TITLE', u'the'), ('TITLE', u'alignment'), ('TITLE', u'of'), ('TITLE', u'smectic'), ('TITLE', u'a'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('TITLE', u'with'), ('TITLE', u'surface'), ('TITLE', u'pretilt'), ('JOURNAL', u'J.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Condens.'), ('JOURNAL', u'Matter'), ('VOLUME', u'20'), ('ISSUE', u'33'), ('YEAR', u'2008'), ('PAGE', u'335101'), ('REFPLAINTEXT', u'De Vita, R., Stewart, I.W.: Influence of weak anchoring upon the alignment of smectic a liquid crystals with surface pretilt. J. Phys. Condens. Matter 20(33), 335101 (2008)'), ('REFSTR', "{u'bibunstructured': u'De Vita, R., Stewart, I.W.: Influence of weak anchoring upon the alignment of smectic a liquid crystals with surface pretilt. J. Phys. Condens. Matter 20(33), 335101 (2008)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Vita', u'particle': u'De', u'initials': u'R'}, {u'familyname': u'Stewart', u'initials': u'IW'}], u'issueid': u'33', u'journaltitle': u'J. Phys. Condens. Matter', u'volumeid': u'20', u'firstpage': u'335101', u'year': u'2008', u'articletitle': {u'#text': u'Influence of weak anchoring upon the alignment of smectic a liquid crystals with surface pretilt', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1088/0953-8984/20/33/335101', u'@type': u'DOI'}}, u'citationnumber': u'5.', u'@id': u'CR5'}")],
[('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Gennes'), ('AUTHOR_FIRST_NAME', u'J'), ('AUTHOR_LAST_NAME', u'Prost'), ('YEAR', u'1993'), ('PUBLISHER', u'The'), ('PUBLISHER', u'Physics'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Liquid'), ('PUBLISHER', u'Crystals'), ('VOLUME', u'2'), ('REFPLAINTEXT', u'de Gennes, P., Prost, J.: The Physics of Liquid Crystals, 2nd edn. Clarendon Press, Oxford (1993)'), ('REFSTR', "{u'bibunstructured': u'de Gennes, P., Prost, J.: The Physics of Liquid Crystals, 2nd edn. Clarendon Press, Oxford (1993)', u'citationnumber': u'6.', u'@id': u'CR6', u'bibbook': {u'bibauthorname': [{u'familyname': u'Gennes', u'particle': u'de', u'initials': u'P'}, {u'familyname': u'Prost', u'initials': u'J'}], u'publisherlocation': u'Oxford', u'booktitle': u'The Physics of Liquid Crystals', u'year': u'1993', u'editionnumber': u'2', u'publishername': u'Clarendon Press'}}")],
[('AUTHOR_FIRST_NAME', u'R'), ('AUTHOR_LAST_NAME', u'Pascalis'), ('TITLE', u'Mechanically'), ('TITLE', u'induced'), ('TITLE', u'HelfrichHurault'), ('TITLE', u'effect'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'confined'), ('TITLE', u'lamellar'), ('TITLE', u'system'), ('TITLE', u'with'), ('TITLE', u'finite'), ('TITLE', u'surface'), ('TITLE', u'anchoring'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'E'), ('VOLUME', u'100'), ('ISSUE', u'1'), ('YEAR', u'2019'), ('PAGE', u'012705'), ('REFPLAINTEXT', u'De Pascalis, R.: Mechanically induced Helfrich\u2013Hurault effect in a confined lamellar system with finite surface anchoring. Phys. Rev. E 100(1), 012705 (2019)'), ('REFSTR', "{u'bibunstructured': u'De Pascalis, R.: Mechanically induced Helfrich\\u2013Hurault effect in a confined lamellar system with finite surface anchoring. Phys. Rev. E 100(1), 012705 (2019)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Pascalis', u'particle': u'De', u'initials': u'R'}, u'issueid': u'1', u'journaltitle': u'Phys. Rev. E', u'volumeid': u'100', u'firstpage': u'012705', u'year': u'2019', u'articletitle': {u'#text': u'Mechanically induced Helfrich\\u2013Hurault effect in a confined lamellar system with finite surface anchoring', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1103/PhysRevE.100.012705', u'@type': u'DOI'}}, u'citationnumber': u'7.', u'@id': u'CR7'}")],
[('AUTHOR_FIRST_NAME', u'H'), ('AUTHOR_LAST_NAME', u'Deuling'), ('TITLE', u'Deformation'), ('TITLE', u'of'), ('TITLE', u'nematic'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('TITLE', u'in'), ('TITLE', u'an'), ('TITLE', u'electric'), ('TITLE', u'field'), ('JOURNAL', u'Mol.'), ('JOURNAL', u'Cryst.'), ('JOURNAL', u'Liq.'), ('JOURNAL', u'Cryst.'), ('VOLUME', u'19'), ('YEAR', u'1972'), ('PAGE', u'123'), ('REFPLAINTEXT', u'Deuling, H.: Deformation of nematic liquid crystals in an electric field. Mol. Cryst. Liq. Cryst. 19, 123 (1972)'), ('REFSTR', "{u'bibunstructured': u'Deuling, H.: Deformation of nematic liquid crystals in an electric field. Mol. Cryst. Liq. Cryst. 19, 123 (1972)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Deuling', u'initials': u'H'}, u'occurrence': {u'handle': u'10.1080/15421407208083858', u'@type': u'DOI'}, u'journaltitle': u'Mol. Cryst. Liq. Cryst.', u'volumeid': u'19', u'firstpage': u'123', u'year': u'1972', u'articletitle': {u'#text': u'Deformation of nematic liquid crystals in an electric field', u'@language': u'En'}}, u'citationnumber': u'8.', u'@id': u'CR8'}")],
[('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'Elias'), ('AUTHOR_FIRST_NAME', u'C'), ('AUTHOR_LAST_NAME', u'Flament'), ('AUTHOR_FIRST_NAME', u'JC'), ('AUTHOR_LAST_NAME', u'Bacri'), ('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Neveau'), ('TITLE', u'Macro-'), ('TITLE', u'organized'), ('TITLE', u'patterns'), ('TITLE', u'in'), ('TITLE', u'ferrofluid'), ('TITLE', u'layer:'), ('TITLE', u'experimental'), ('TITLE', u'studies'), ('JOURNAL', u'J.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'I'), ('VOLUME', u'7'), ('YEAR', u'1997'), ('PAGE', u'711'), ('REFPLAINTEXT', u'Elias, F., Flament, C., Bacri, J.C., Neveau, S.: Macro-organized patterns in ferrofluid layer: experimental studies. J. Phys. I 7, 711 (1997)'), ('REFSTR', "{u'bibunstructured': u'Elias, F., Flament, C., Bacri, J.C., Neveau, S.: Macro-organized patterns in ferrofluid layer: experimental studies. J. Phys. I 7, 711 (1997)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Elias', u'initials': u'F'}, {u'familyname': u'Flament', u'initials': u'C'}, {u'familyname': u'Bacri', u'initials': u'JC'}, {u'familyname': u'Neveau', u'initials': u'S'}], u'journaltitle': u'J. Phys. I', u'volumeid': u'7', u'firstpage': u'711', u'year': u'1997', u'articletitle': {u'#text': u'Macro-organized patterns in ferrofluid layer: experimental studies', u'@language': u'En'}}, u'citationnumber': u'9.', u'@id': u'CR9'}")],
[('AUTHOR_FIRST_NAME', u'SJ'), ('AUTHOR_LAST_NAME', u'Elston'), ('TITLE', u'Smectic-'), ('TITLE', u'A'), ('TITLE', u'Fredericksz'), ('TITLE', u'transition'), ('JOURNAL', u'Phy.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'E'), ('VOLUME', u'58'), ('ISSUE', u'2'), ('YEAR', u'1998'), ('PAGE', u'R1215'), ('REFPLAINTEXT', u'Elston, S.J.: Smectic-A Fr\xe9edericksz transition. Phy. Rev. E 58(2), R1215\u2013R1217 (1998)'), ('REFSTR', "{u'bibunstructured': u'Elston, S.J.: Smectic-A Fr\\xe9edericksz transition. Phy. Rev. E 58(2), R1215\\u2013R1217 (1998)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Elston', u'initials': u'SJ'}, u'issueid': u'2', u'journaltitle': u'Phy. Rev. E', u'volumeid': u'58', u'firstpage': u'R1215', u'lastpage': u'R1217', u'year': u'1998', u'articletitle': {u'#text': u'Smectic-A Fr\\xe9edericksz transition', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1103/PhysRevE.58.R1215', u'@type': u'DOI'}}, u'citationnumber': u'10.', u'@id': u'CR10'}")],
[('AUTHOR_FIRST_NAME', u'CJ'), ('AUTHOR_LAST_NAME', u'Garca-Cervera'), ('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Joo'), ('TITLE', u'Analytic'), ('TITLE', u'description'), ('TITLE', u'of'), ('TITLE', u'layer'), ('TITLE', u'undulations'), ('TITLE', u'in'), ('TITLE', u'smectic'), ('TITLE', u'a'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('JOURNAL', u'Arch.'), ('JOURNAL', u'Ration.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Anal.'), ('VOLUME', u'203'), ('ISSUE', u'1'), ('YEAR', u'2012'), ('PAGE', u'1'), ('DOI', u'10.1007/s00205-011-0442-y'), ('REFPLAINTEXT', u'Garc\xeda-Cervera, C.J., Joo, S.: Analytic description of layer undulations in smectic a liquid crystals. Arch. Ration. Mech. Anal. 203(1), 1\u201343 (2012)'), ('REFSTR', "{u'bibunstructured': u'Garc\\xeda-Cervera, C.J., Joo, S.: Analytic description of layer undulations in smectic a liquid crystals. Arch. Ration. Mech. Anal. 203(1), 1\\u201343 (2012)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Garc\\xeda-Cervera', u'initials': u'CJ'}, {u'familyname': u'Joo', u'initials': u'S'}], u'issueid': u'1', u'journaltitle': u'Arch. Ration. Mech. Anal.', u'volumeid': u'203', u'firstpage': u'1', u'lastpage': u'43', u'year': u'2012', u'articletitle': {u'#text': u'Analytic description of layer undulations in smectic a liquid crystals', u'@language': u'En'}, u'occurrence': [{u'handle': u'2864406', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00205-011-0442-y', u'@type': u'DOI'}]}, u'citationnumber': u'11.', u'@id': u'CR11'}")],
[('AUTHOR_FIRST_NAME', u'W'), ('AUTHOR_LAST_NAME', u'Helfrich'), ('TITLE', u'Deformation'), ('TITLE', u'of'), ('TITLE', u'cholesteric'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('TITLE', u'with'), ('TITLE', u'low'), ('TITLE', u'threshold'), ('TITLE', u'voltage'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Lett.'), ('VOLUME', u'17'), ('ISSUE', u'12'), ('YEAR', u'1970'), ('PAGE', u'531'), ('REFPLAINTEXT', u'Helfrich, W.: Deformation of cholesteric liquid crystals with low threshold voltage. Appl. Phys. Lett. 17(12), 531\u2013532 (1970)'), ('REFSTR', "{u'bibunstructured': u'Helfrich, W.: Deformation of cholesteric liquid crystals with low threshold voltage. Appl. Phys. Lett. 17(12), 531\\u2013532 (1970)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Helfrich', u'initials': u'W'}, u'issueid': u'12', u'journaltitle': u'Appl. Phys. Lett.', u'volumeid': u'17', u'firstpage': u'531', u'lastpage': u'532', u'year': u'1970', u'articletitle': {u'#text': u'Deformation of cholesteric liquid crystals with low threshold voltage', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1063/1.1653297', u'@type': u'DOI'}}, u'citationnumber': u'12.', u'@id': u'CR12'}")],
[('AUTHOR_FIRST_NAME', u'J'), ('AUTHOR_LAST_NAME', u'Hurault'), ('TITLE', u'Static'), ('TITLE', u'distortions'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'cholesteric'), ('TITLE', u'planar'), ('TITLE', u'structure'), ('TITLE', u'induced'), ('TITLE', u'by'), ('TITLE', u'magnet'), ('TITLE', u'ic'), ('TITLE', u'or'), ('TITLE', u'ac'), ('TITLE', u'electric'), ('TITLE', u'fields'), ('JOURNAL', u'J.'), ('JOURNAL', u'Chem.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'59'), ('ISSUE', u'4'), ('YEAR', u'1973'), ('PAGE', u'2068'), ('REFPLAINTEXT', u'Hurault, J.: Static distortions of a cholesteric planar structure induced by magnet ic or ac electric fields. J. Chem. Phys. 59(4), 2068\u20132075 (1973)'), ('REFSTR', "{u'bibunstructured': u'Hurault, J.: Static distortions of a cholesteric planar structure induced by magnet ic or ac electric fields. J. Chem. Phys. 59(4), 2068\\u20132075 (1973)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Hurault', u'initials': u'J'}, u'issueid': u'4', u'journaltitle': u'J. Chem. Phys.', u'volumeid': u'59', u'firstpage': u'2068', u'lastpage': u'2075', u'year': u'1973', u'articletitle': {u'#text': u'Static distortions of a cholesteric planar structure induced by magnet ic or ac electric fields', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1063/1.1680293', u'@type': u'DOI'}}, u'citationnumber': u'13.', u'@id': u'CR13'}")],
[('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Ishikawa'), ('AUTHOR_FIRST_NAME', u'OD'), ('AUTHOR_LAST_NAME', u'Lavrentovich'), ('TITLE', u'Undulations'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'confined'), ('TITLE', u'lamellar'), ('TITLE', u'system'), ('TITLE', u'with'), ('TITLE', u'surface'), ('TITLE', u'anchoring'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'E'), ('VOLUME', u'63'), ('ISSUE', u'3'), ('YEAR', u'2001'), ('PAGE', u'030501'), ('REFPLAINTEXT', u'Ishikawa, T., Lavrentovich, O.D.: Undulations in a confined lamellar system with surface anchoring. Phys. Rev. E 63(3), 030501 (2001)'), ('REFSTR', "{u'bibunstructured': u'Ishikawa, T., Lavrentovich, O.D.: Undulations in a confined lamellar system with surface anchoring. Phys. Rev. E 63(3), 030501 (2001)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Ishikawa', u'initials': u'T'}, {u'familyname': u'Lavrentovich', u'initials': u'OD'}], u'issueid': u'3', u'journaltitle': u'Phys. Rev. E', u'volumeid': u'63', u'firstpage': u'030501', u'year': u'2001', u'articletitle': {u'#text': u'Undulations in a confined lamellar system with surface anchoring', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1103/PhysRevE.63.030501', u'@type': u'DOI'}}, u'citationnumber': u'14.', u'@id': u'CR14'}")],
[('AUTHOR_FIRST_NAME', u'PJ'), ('AUTHOR_LAST_NAME', u'Kedney'), ('AUTHOR_FIRST_NAME', u'IW'), ('AUTHOR_LAST_NAME', u'Stewart'), ('TITLE', u'The'), ('TITLE', u'onset'), ('TITLE', u'of'), ('TITLE', u'layer'), ('TITLE', u'deformations'), ('TITLE', u'in'), ('TITLE', u'non-'), ('TITLE', u'chiral'), ('TITLE', u'smectic'), ('TITLE', u'C'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('JOURNAL', u'ZAMP'), ('VOLUME', u'45'), ('ISSUE', u'6'), ('YEAR', u'1994'), ('PAGE', u'882'), ('REFPLAINTEXT', u'Kedney, P.J., Stewart, I.W.: The onset of layer deformations in non-chiral smectic C liquid crystals. ZAMP 45(6), 882\u2013898 (1994)'), ('REFSTR', "{u'bibunstructured': u'Kedney, P.J., Stewart, I.W.: The onset of layer deformations in non-chiral smectic C liquid crystals. ZAMP 45(6), 882\\u2013898 (1994)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Kedney', u'initials': u'PJ'}, {u'familyname': u'Stewart', u'initials': u'IW'}], u'issueid': u'6', u'journaltitle': u'ZAMP', u'volumeid': u'45', u'firstpage': u'882', u'lastpage': u'898', u'year': u'1994', u'articletitle': {u'#text': u'The onset of layer deformations in non-chiral smectic C liquid crystals', u'@language': u'En'}, u'occurrence': [{u'handle': u'1306938', u'@type': u'AMSID'}, {u'handle': u'0820.76009', u'@type': u'ZLBID'}]}, u'citationnumber': u'15.', u'@id': u'CR15'}")],
[('AUTHOR_FIRST_NAME', u'LV'), ('AUTHOR_LAST_NAME', u'Mirantsev'), ('TITLE', u'Dynamics'), ('TITLE', u'of'), ('TITLE', u'HelfrichHurault'), ('TITLE', u'deformations'), ('TITLE', u'in'), ('TITLE', u'smectic-'), ('TITLE', u'A'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('JOURNAL', u'Eur.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'J.'), ('JOURNAL', u'E'), ('VOLUME', u'38'), ('ISSUE', u'9'), ('YEAR', u'2015'), ('PAGE', u'104'), ('REFPLAINTEXT', u'Mirantsev, L.V.: Dynamics of Helfrich\u2013Hurault deformations in smectic-A liquid crystals. Eur. Phys. J. E 38(9), 104 (2015)'), ('REFSTR', "{u'bibunstructured': u'Mirantsev, L.V.: Dynamics of Helfrich\\u2013Hurault deformations in smectic-A liquid crystals. Eur. Phys. J. E 38(9), 104 (2015)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Mirantsev', u'initials': u'LV'}, u'issueid': u'9', u'journaltitle': u'Eur. Phys. J. E', u'volumeid': u'38', u'firstpage': u'104', u'year': u'2015', u'articletitle': {u'#text': u'Dynamics of Helfrich\\u2013Hurault deformations in smectic-A liquid crystals', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1140/epje/i2015-15104-6', u'@type': u'DOI'}}, u'citationnumber': u'16.', u'@id': u'CR16'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Napoli'), ('TITLE', u'Weak'), ('TITLE', u'anchoring'), ('TITLE', u'effects'), ('TITLE', u'in'), ('TITLE', u'electrically'), ('TITLE', u'driven'), ('TITLE', u'Freedericksz'), ('TITLE', u'transitions'), ('JOURNAL', u'J.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'A'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Gen.'), ('VOLUME', u'39'), ('YEAR', u'2005'), ('PAGE', u'11'), ('DOI', u'10.1088/0305-4470/39/1/002'), ('REFPLAINTEXT', u'Napoli, G.: Weak anchoring effects in electrically driven Freedericksz transitions. J. Phys. A Math. Gen. 39, 11\u201331 (2005)'), ('REFSTR', "{u'bibunstructured': u'Napoli, G.: Weak anchoring effects in electrically driven Freedericksz transitions. J. Phys. A Math. Gen. 39, 11\\u201331 (2005)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Napoli', u'initials': u'G'}, u'occurrence': [{u'handle': u'2200181', u'@type': u'AMSID'}, {u'handle': u'10.1088/0305-4470/39/1/002', u'@type': u'DOI'}], u'journaltitle': u'J. Phys. A Math. Gen.', u'volumeid': u'39', u'firstpage': u'11', u'lastpage': u'31', u'year': u'2005', u'articletitle': {u'#text': u'Weak anchoring effects in electrically driven Freedericksz transitions', u'@language': u'En'}}, u'citationnumber': u'17.', u'@id': u'CR17'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Napoli'), ('TITLE', u'On'), ('TITLE', u'smectic-'), ('TITLE', u'A'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('TITLE', u'in'), ('TITLE', u'an'), ('TITLE', u'electrostatic'), ('TITLE', u'field'), ('JOURNAL', u'IMA'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'71'), ('ISSUE', u'1'), ('YEAR', u'2006'), ('PAGE', u'34'), ('DOI', u'10.1093/imamat/hxh080'), ('REFPLAINTEXT', u'Napoli, G.: On smectic-A liquid crystals in an electrostatic field. IMA J. Appl. Math. 71(1), 34\u201346 (2006)'), ('REFSTR', "{u'bibunstructured': u'Napoli, G.: On smectic-A liquid crystals in an electrostatic field. IMA J. Appl. Math. 71(1), 34\\u201346 (2006)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Napoli', u'initials': u'G'}, u'issueid': u'1', u'journaltitle': u'IMA J. Appl. Math.', u'volumeid': u'71', u'firstpage': u'34', u'lastpage': u'46', u'year': u'2006', u'articletitle': {u'#text': u'On smectic-A liquid crystals in an electrostatic field', u'@language': u'En'}, u'occurrence': [{u'handle': u'2203042', u'@type': u'AMSID'}, {u'handle': u'10.1093/imamat/hxh080', u'@type': u'DOI'}]}, u'citationnumber': u'18.', u'@id': u'CR18'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Napoli'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Nobili'), ('TITLE', u'Mechanically'), ('TITLE', u'induced'), ('TITLE', u'HelfrichHurault'), ('TITLE', u'effect'), ('TITLE', u'in'), ('TITLE', u'lamellar'), ('TITLE', u'systems'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'E'), ('VOLUME', u'80'), ('ISSUE', u'3'), ('YEAR', u'2009'), ('PAGE', u'031710'), ('REFPLAINTEXT', u'Napoli, G., Nobili, A.: Mechanically induced Helfrich\u2013Hurault effect in lamellar systems. Phys. Rev. E 80(3), 031710 (2009)'), ('REFSTR', "{u'bibunstructured': u'Napoli, G., Nobili, A.: Mechanically induced Helfrich\\u2013Hurault effect in lamellar systems. Phys. Rev. E 80(3), 031710 (2009)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Napoli', u'initials': u'G'}, {u'familyname': u'Nobili', u'initials': u'A'}], u'issueid': u'3', u'journaltitle': u'Phys. Rev. E', u'volumeid': u'80', u'firstpage': u'031710', u'year': u'2009', u'articletitle': {u'#text': u'Mechanically induced Helfrich\\u2013Hurault effect in lamellar systems', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1103/PhysRevE.80.031710', u'@type': u'DOI'}}, u'citationnumber': u'19.', u'@id': u'CR19'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Napoli'), ('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Turzi'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'determination'), ('TITLE', u'of'), ('TITLE', u'nontrivial'), ('TITLE', u'equilibrium'), ('TITLE', u'configurations'), ('TITLE', u'close'), ('TITLE', u'to'), ('TITLE', u'a'), ('TITLE', u'bifurcation'), ('TITLE', u'point'), ('JOURNAL', u'Comput.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Appl.'), ('VOLUME', u'55'), ('ISSUE', u'2'), ('YEAR', u'2008'), ('PAGE', u'299'), ('DOI', u'10.1016/j.camwa.2007.04.008'), ('REFPLAINTEXT', u'Napoli, G., Turzi, S.: On the determination of nontrivial equilibrium configurations close to a bifurcation point. Comput. Math. Appl. 55(2), 299\u2013306 (2008)'), ('REFSTR', "{u'bibunstructured': u'Napoli, G., Turzi, S.: On the determination of nontrivial equilibrium configurations close to a bifurcation point. Comput. Math. Appl. 55(2), 299\\u2013306 (2008)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Napoli', u'initials': u'G'}, {u'familyname': u'Turzi', u'initials': u'S'}], u'issueid': u'2', u'journaltitle': u'Comput. Math. Appl.', u'volumeid': u'55', u'firstpage': u'299', u'lastpage': u'306', u'year': u'2008', u'articletitle': {u'#text': u'On the determination of nontrivial equilibrium configurations close to a bifurcation point', u'@language': u'En'}, u'occurrence': [{u'handle': u'2383109', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.camwa.2007.04.008', u'@type': u'DOI'}]}, u'citationnumber': u'20.', u'@id': u'CR20'}")],
[('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Onuki'), ('AUTHOR_FIRST_NAME', u'JI'), ('AUTHOR_LAST_NAME', u'Fukuda'), ('TITLE', u'Electric'), ('TITLE', u'field'), ('TITLE', u'effects'), ('TITLE', u'and'), ('TITLE', u'form'), ('TITLE', u'birefringence'), ('TITLE', u'in'), ('TITLE', u'diblock'), ('TITLE', u'copolymers'), ('JOURNAL', u'Macromolecules'), ('VOLUME', u'28'), ('YEAR', u'1996'), ('PAGE', u'8788'), ('REFPLAINTEXT', u'Onuki, A., Fukuda, J.I.: Electric field effects and form birefringence in diblock copolymers. Macromolecules 28, 8788 (1996)'), ('REFSTR', "{u'bibunstructured': u'Onuki, A., Fukuda, J.I.: Electric field effects and form birefringence in diblock copolymers. Macromolecules 28, 8788 (1996)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Onuki', u'initials': u'A'}, {u'familyname': u'Fukuda', u'initials': u'JI'}], u'occurrence': {u'handle': u'10.1021/ma00130a011', u'@type': u'DOI'}, u'journaltitle': u'Macromolecules', u'volumeid': u'28', u'firstpage': u'8788', u'year': u'1996', u'articletitle': {u'#text': u'Electric field effects and form birefringence in diblock copolymers', u'@language': u'En'}}, u'citationnumber': u'21.', u'@id': u'CR21'}")],
[('AUTHOR_FIRST_NAME', u'JB'), ('AUTHOR_LAST_NAME', u'Poursamad'), ('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Hallaji'), ('TITLE', u'Freedericksz'), ('TITLE', u'transition'), ('TITLE', u'in'), ('TITLE', u'smectic-'), ('TITLE', u'A'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('TITLE', u'doped'), ('TITLE', u'by'), ('TITLE', u'ferroelectric'), ('TITLE', u'nanoparticles'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'B'), ('JOURNAL', u'Condens.'), ('JOURNAL', u'Matter'), ('VOLUME', u'504'), ('YEAR', u'2017'), ('PAGE', u'112'), ('REFPLAINTEXT', u'Poursamad, J.B., Hallaji, T.: Freedericksz transition in smectic-A liquid crystals doped by ferroelectric nanoparticles. Phys. B Condens. Matter 504, 112\u2013115 (2017)'), ('REFSTR', "{u'bibunstructured': u'Poursamad, J.B., Hallaji, T.: Freedericksz transition in smectic-A liquid crystals doped by ferroelectric nanoparticles. Phys. B Condens. Matter 504, 112\\u2013115 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Poursamad', u'initials': u'JB'}, {u'familyname': u'Hallaji', u'initials': u'T'}], u'occurrence': {u'handle': u'10.1016/j.physb.2016.10.022', u'@type': u'DOI'}, u'journaltitle': u'Phys. B Condens. Matter', u'volumeid': u'504', u'firstpage': u'112', u'lastpage': u'115', u'year': u'2017', u'articletitle': {u'#text': u'Freedericksz transition in smectic-A liquid crystals doped by ferroelectric nanoparticles', u'@language': u'En'}}, u'citationnumber': u'22.', u'@id': u'CR22'}")],
[('REFPLAINTEXT', u'Rapini, A., Papoular., M.: Distortion d\u2019une lamelle n\xe9matique sous champ magn\xe9tique. conditions d\u2019angrage aux paroix. J. Phys. Colloque C4, p. 54 (1969)'), ('REFSTR', "{u'bibunstructured': u'Rapini, A., Papoular., M.: Distortion d\\u2019une lamelle n\\xe9matique sous champ magn\\xe9tique. conditions d\\u2019angrage aux paroix. J. Phys. Colloque C4, p. 54 (1969)', u'citationnumber': u'23.', u'@id': u'CR23'}")],
[('AUTHOR_FIRST_NAME', u'R'), ('AUTHOR_LAST_NAME', u'Ribotta'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Durand'), ('TITLE', u'Mechanical'), ('TITLE', u'instabilities'), ('TITLE', u'of'), ('TITLE', u'smectic-'), ('TITLE', u'A'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('TITLE', u'under'), ('TITLE', u'dilatative'), ('TITLE', u'or'), ('TITLE', u'compressive'), ('TITLE', u'stresses'), ('JOURNAL', u'J.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'38'), ('YEAR', u'1977'), ('PAGE', u'179'), ('REFPLAINTEXT', u'Ribotta, R., Durand, G.: Mechanical instabilities of smectic-A liquid crystals under dilatative or compressive stresses. J. Phys. 38, 179\u2013203 (1977)'), ('REFSTR', "{u'bibunstructured': u'Ribotta, R., Durand, G.: Mechanical instabilities of smectic-A liquid crystals under dilatative or compressive stresses. J. Phys. 38, 179\\u2013203 (1977)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Ribotta', u'initials': u'R'}, {u'familyname': u'Durand', u'initials': u'G'}], u'occurrence': {u'handle': u'10.1051/jphys:01977003802017900', u'@type': u'DOI'}, u'journaltitle': u'J. Phys.', u'volumeid': u'38', u'firstpage': u'179', u'lastpage': u'203', u'year': u'1977', u'articletitle': {u'#text': u'Mechanical instabilities of smectic-A liquid crystals under dilatative or compressive stresses', u'@language': u'En'}}, u'citationnumber': u'24.', u'@id': u'CR24'}")],
[('AUTHOR_FIRST_NAME', u'CD'), ('AUTHOR_LAST_NAME', u'Santangelo'), ('AUTHOR_FIRST_NAME', u'RD'), ('AUTHOR_LAST_NAME', u'Kamien'), ('TITLE', u'Curvature'), ('TITLE', u'and'), ('TITLE', u'topology'), ('TITLE', u'in'), ('TITLE', u'smectic-'), ('TITLE', u'A'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('JOURNAL', u'Proc.'), ('JOURNAL', u'R.'), ('JOURNAL', u'Soc.'), ('JOURNAL', u'A'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Eng.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'461'), ('ISSUE', u'2061'), ('YEAR', u'2005'), ('PAGE', u'2911'), ('DOI', u'10.1098/rspa.2005.1534'), ('REFPLAINTEXT', u'Santangelo, C.D., Kamien, R.D.: Curvature and topology in smectic-A liquid crystals. Proc. R. Soc. A Math. Phys. Eng. Sci. 461(2061), 2911\u20132921 (2005)'), ('REFSTR', "{u'bibunstructured': u'Santangelo, C.D., Kamien, R.D.: Curvature and topology in smectic-A liquid crystals. Proc. R. Soc. A Math. Phys. Eng. Sci. 461(2061), 2911\\u20132921 (2005)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Santangelo', u'initials': u'CD'}, {u'familyname': u'Kamien', u'initials': u'RD'}], u'issueid': u'2061', u'journaltitle': u'Proc. R. Soc. A Math. Phys. Eng. Sci.', u'volumeid': u'461', u'firstpage': u'2911', u'lastpage': u'2921', u'year': u'2005', u'articletitle': {u'#text': u'Curvature and topology in smectic-A liquid crystals', u'@language': u'En'}, u'occurrence': [{u'handle': u'2165518', u'@type': u'AMSID'}, {u'handle': u'10.1098/rspa.2005.1534', u'@type': u'DOI'}]}, u'citationnumber': u'25.', u'@id': u'CR25'}")],
[('AUTHOR_FIRST_NAME', u'BI'), ('AUTHOR_LAST_NAME', u'Senyuk'), ('AUTHOR_FIRST_NAME', u'II'), ('AUTHOR_LAST_NAME', u'Smalyukh'), ('AUTHOR_FIRST_NAME', u'OD'), ('AUTHOR_LAST_NAME', u'Lavrentovich'), ('TITLE', u'Undulations'), ('TITLE', u'of'), ('TITLE', u'lamellar'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('TITLE', u'in'), ('TITLE', u'cells'), ('TITLE', u'with'), ('TITLE', u'finite'), ('TITLE', u'surface'), ('TITLE', u'anchoring'), ('TITLE', u'near'), ('TITLE', u'and'), ('TITLE', u'well'), ('TITLE', u'above'), ('TITLE', u'the'), ('TITLE', u'threshold'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'E'), ('VOLUME', u'74'), ('ISSUE', u'1'), ('YEAR', u'2006'), ('PAGE', u'011712'), ('REFPLAINTEXT', u'Senyuk, B.I., Smalyukh, I.I., Lavrentovich, O.D.: Undulations of lamellar liquid crystals in cells with finite surface anchoring near and well above the threshold. Phys. Rev. E 74(1), 011712 (2006)'), ('REFSTR', "{u'bibunstructured': u'Senyuk, B.I., Smalyukh, I.I., Lavrentovich, O.D.: Undulations of lamellar liquid crystals in cells with finite surface anchoring near and well above the threshold. Phys. Rev. E 74(1), 011712 (2006)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Senyuk', u'initials': u'BI'}, {u'familyname': u'Smalyukh', u'initials': u'II'}, {u'familyname': u'Lavrentovich', u'initials': u'OD'}], u'issueid': u'1', u'journaltitle': u'Phys. Rev. E', u'volumeid': u'74', u'firstpage': u'011712', u'year': u'2006', u'articletitle': {u'#text': u'Undulations of lamellar liquid crystals in cells with finite surface anchoring near and well above the threshold', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1103/PhysRevE.74.011712', u'@type': u'DOI'}}, u'citationnumber': u'26.', u'@id': u'CR26'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Seul'), ('AUTHOR_FIRST_NAME', u'R'), ('AUTHOR_LAST_NAME', u'Wolfe'), ('TITLE', u'Evolution'), ('TITLE', u'of'), ('TITLE', u'disorder'), ('TITLE', u'in'), ('TITLE', u'magnetic'), ('TITLE', u'stripe'), ('TITLE', u'domains.'), ('TITLE', u'I.'), ('TITLE', u'Transverse'), ('TITLE', u'instabilities'), ('TITLE', u'and'), ('TITLE', u'disclination'), ('TITLE', u'unbinding'), ('TITLE', u'in'), ('TITLE', u'lamellar'), ('TITLE', u'patterns'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'A'), ('VOLUME', u'46'), ('ISSUE', u'12'), ('YEAR', u'1992'), ('PAGE', u'7519'), ('REFPLAINTEXT', u'Seul, M., Wolfe, R.: Evolution of disorder in magnetic stripe domains. I. Transverse instabilities and disclination unbinding in lamellar patterns. Phys. Rev. A 46(12), 7519\u20137533 (1992)'), ('REFSTR', "{u'bibunstructured': u'Seul, M., Wolfe, R.: Evolution of disorder in magnetic stripe domains. I. Transverse instabilities and disclination unbinding in lamellar patterns. Phys. Rev. A 46(12), 7519\\u20137533 (1992)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Seul', u'initials': u'M'}, {u'familyname': u'Wolfe', u'initials': u'R'}], u'issueid': u'12', u'journaltitle': u'Phys. Rev. A', u'volumeid': u'46', u'firstpage': u'7519', u'lastpage': u'7533', u'year': u'1992', u'articletitle': {u'#text': u'Evolution of disorder in magnetic stripe domains. I. Transverse instabilities and disclination unbinding in lamellar patterns', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1103/PhysRevA.46.7519', u'@type': u'DOI'}}, u'citationnumber': u'27.', u'@id': u'CR27'}")],
[('AUTHOR_FIRST_NAME', u'AN'), ('AUTHOR_LAST_NAME', u'Shalaginov'), ('AUTHOR_FIRST_NAME', u'LD'), ('AUTHOR_LAST_NAME', u'Hazelwood'), ('AUTHOR_FIRST_NAME', u'TJ'), ('AUTHOR_LAST_NAME', u'Sluckin'), ('TITLE', u'Dynamics'), ('TITLE', u'of'), ('TITLE', u'chevron'), ('TITLE', u'structure'), ('TITLE', u'formation'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'E'), ('VOLUME', u'58'), ('ISSUE', u'6'), ('YEAR', u'1998'), ('PAGE', u'7455'), ('REFPLAINTEXT', u'Shalaginov, A.N., Hazelwood, L.D., Sluckin, T.J.: Dynamics of chevron structure formation. Phys. Rev. E 58(6), 7455\u20137464 (1998)'), ('REFSTR', "{u'bibunstructured': u'Shalaginov, A.N., Hazelwood, L.D., Sluckin, T.J.: Dynamics of chevron structure formation. Phys. Rev. E 58(6), 7455\\u20137464 (1998)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Shalaginov', u'initials': u'AN'}, {u'familyname': u'Hazelwood', u'initials': u'LD'}, {u'familyname': u'Sluckin', u'initials': u'TJ'}], u'issueid': u'6', u'journaltitle': u'Phys. Rev. E', u'volumeid': u'58', u'firstpage': u'7455', u'lastpage': u'7464', u'year': u'1998', u'articletitle': {u'#text': u'Dynamics of chevron structure formation', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1103/PhysRevE.58.7455', u'@type': u'DOI'}}, u'citationnumber': u'28.', u'@id': u'CR28'}")],
[('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Siemianowski'), ('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Brimicombe'), ('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Jaradat'), ('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Thompson'), ('AUTHOR_FIRST_NAME', u'W'), ('AUTHOR_LAST_NAME', u'Bras'), ('AUTHOR_FIRST_NAME', u'H'), ('AUTHOR_LAST_NAME', u'Gleeson'), ('TITLE', u'Reorientation'), ('TITLE', u'mechanisms'), ('TITLE', u'in'), ('TITLE', u'smectic'), ('TITLE', u'a'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('JOURNAL', u'Liq.'), ('JOURNAL', u'Cryst.'), ('VOLUME', u'39'), ('ISSUE', u'10'), ('YEAR', u'2012'), ('PAGE', u'1261'), ('REFPLAINTEXT', u'Siemianowski, S., Brimicombe, P., Jaradat, S., Thompson, P., Bras, W., Gleeson, H.: Reorientation mechanisms in smectic a liquid crystals. Liq. Cryst. 39(10), 1261\u20131275 (2012).'), ('REFSTR', "{u'bibunstructured': {u'#text': u'Siemianowski, S., Brimicombe, P., Jaradat, S., Thompson, P., Bras, W., Gleeson, H.: Reorientation mechanisms in smectic a liquid crystals. Liq. Cryst. 39(10), 1261\\u20131275 (2012).', u'externalref': {u'refsource': u'https://doi.org/10.1080/02678292.2012.714486', u'reftarget': {u'@address': u'10.1080/02678292.2012.714486', u'@targettype': u'DOI'}}}, u'bibarticle': {u'bibauthorname': [{u'familyname': u'Siemianowski', u'initials': u'S'}, {u'familyname': u'Brimicombe', u'initials': u'P'}, {u'familyname': u'Jaradat', u'initials': u'S'}, {u'familyname': u'Thompson', u'initials': u'P'}, {u'familyname': u'Bras', u'initials': u'W'}, {u'familyname': u'Gleeson', u'initials': u'H'}], u'issueid': u'10', u'journaltitle': u'Liq. 
Cryst.', u'volumeid': u'39', u'firstpage': u'1261', u'lastpage': u'1275', u'bibarticledoi': u'10.1080/02678292.2012.714486', u'year': u'2012', u'articletitle': {u'#text': u'Reorientation mechanisms in smectic a liquid crystals', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1080/02678292.2012.714486', u'@type': u'DOI'}}, u'citationnumber': u'29.', u'@id': u'CR29'}")],
[('AUTHOR_FIRST_NAME', u'SJ'), ('AUTHOR_LAST_NAME', u'Singer'), ('TITLE', u'Layer'), ('TITLE', u'buckling'), ('TITLE', u'in'), ('TITLE', u'smectic-'), ('TITLE', u'A'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('TITLE', u'and'), ('TITLE', u'two-'), ('TITLE', u'dimensional'), ('TITLE', u'stripe'), ('TITLE', u'phases'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'E'), ('VOLUME', u'48'), ('ISSUE', u'4'), ('YEAR', u'1993'), ('PAGE', u'2796'), ('REFPLAINTEXT', u'Singer, S.J.: Layer buckling in smectic-A liquid crystals and two-dimensional stripe phases. Phys. Rev. E 48(4), 2796\u20132804 (1993)'), ('REFSTR', "{u'bibunstructured': u'Singer, S.J.: Layer buckling in smectic-A liquid crystals and two-dimensional stripe phases. Phys. Rev. E 48(4), 2796\\u20132804 (1993)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Singer', u'initials': u'SJ'}, u'issueid': u'4', u'journaltitle': u'Phys. Rev. E', u'volumeid': u'48', u'firstpage': u'2796', u'lastpage': u'2804', u'year': u'1993', u'articletitle': {u'#text': u'Layer buckling in smectic-A liquid crystals and two-dimensional stripe phases', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1103/PhysRevE.48.2796', u'@type': u'DOI'}}, u'citationnumber': u'30.', u'@id': u'CR30'}")],
[('AUTHOR_FIRST_NAME', u'IW'), ('AUTHOR_LAST_NAME', u'Stewart'), ('TITLE', u'Layer'), ('TITLE', u'undulations'), ('TITLE', u'in'), ('TITLE', u'finite'), ('TITLE', u'samples'), ('TITLE', u'of'), ('TITLE', u'smectic-'), ('TITLE', u'A'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('TITLE', u'subjected'), ('TITLE', u'to'), ('TITLE', u'uniform'), ('TITLE', u'pressure'), ('TITLE', u'and'), ('TITLE', u'magnetic'), ('TITLE', u'fields'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'E'), ('VOLUME', u'58'), ('ISSUE', u'5'), ('YEAR', u'1998'), ('PAGE', u'5926'), ('REFPLAINTEXT', u'Stewart, I.W.: Layer undulations in finite samples of smectic-A liquid crystals subjected to uniform pressure and magnetic fields. Phys. Rev. E 58(5), 5926\u20135933 (1998)'), ('REFSTR', "{u'bibunstructured': u'Stewart, I.W.: Layer undulations in finite samples of smectic-A liquid crystals subjected to uniform pressure and magnetic fields. Phys. Rev. E 58(5), 5926\\u20135933 (1998)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Stewart', u'initials': u'IW'}, u'issueid': u'5', u'journaltitle': u'Phys. Rev. E', u'volumeid': u'58', u'firstpage': u'5926', u'lastpage': u'5933', u'year': u'1998', u'articletitle': {u'#text': u'Layer undulations in finite samples of smectic-A liquid crystals subjected to uniform pressure and magnetic fields', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1103/PhysRevE.58.5926', u'@type': u'DOI'}}, u'citationnumber': u'31.', u'@id': u'CR31'}")],
[('AUTHOR_FIRST_NAME', u'EG'), ('AUTHOR_LAST_NAME', u'Virga'), ('YEAR', u'1993'), ('PUBLISHER', u'Variational'), ('PUBLISHER', u'Theories'), ('PUBLISHER', u'for'), ('PUBLISHER', u'Liquid'), ('PUBLISHER', u'Crystals'), ('REFPLAINTEXT', u'Virga, E.G.: Variational Theories for Liquid Crystals. Chapman & Hall, London (1993)'), ('REFSTR', "{u'bibunstructured': u'Virga, E.G.: Variational Theories for Liquid Crystals. Chapman & Hall, London (1993)', u'citationnumber': u'32.', u'@id': u'CR32', u'bibbook': {u'bibauthorname': {u'familyname': u'Virga', u'initials': u'EG'}, u'publisherlocation': u'London', u'occurrence': {u'handle': u'0814.49002', u'@type': u'ZLBID'}, u'booktitle': u'Variational Theories for Liquid Crystals', u'year': u'1993', u'publishername': u'Chapman & Hall'}}")],
[('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Weinan'), ('TITLE', u'Nonlinear'), ('TITLE', u'continuum'), ('TITLE', u'theory'), ('TITLE', u'of'), ('TITLE', u'smectic-'), ('TITLE', u'A'), ('TITLE', u'liquid'), ('TITLE', u'crystals'), ('JOURNAL', u'Arch.'), ('JOURNAL', u'Ration.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Anal.'), ('VOLUME', u'137'), ('ISSUE', u'2'), ('YEAR', u'1997'), ('PAGE', u'159'), ('DOI', u'10.1007/s002050050026'), ('REFPLAINTEXT', u'Weinan, E.: Nonlinear continuum theory of smectic-A liquid crystals. Arch. Ration. Mech. Anal. 137(2), 159\u2013175 (1997)'), ('REFSTR', "{u'bibunstructured': u'Weinan, E.: Nonlinear continuum theory of smectic-A liquid crystals. Arch. Ration. Mech. Anal. 137(2), 159\\u2013175 (1997)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Weinan', u'initials': u'E'}, u'issueid': u'2', u'journaltitle': u'Arch. Ration. Mech. Anal.', u'volumeid': u'137', u'firstpage': u'159', u'lastpage': u'175', u'year': u'1997', u'articletitle': {u'#text': u'Nonlinear continuum theory of smectic-A liquid crystals', u'@language': u'En'}, u'occurrence': [{u'handle': u'1463793', u'@type': u'AMSID'}, {u'handle': u'10.1007/s002050050026', u'@type': u'DOI'}]}, u'citationnumber': u'33.', u'@id': u'CR33'}")],
[('AUTHOR_FIRST_NAME', u'ID'), ('AUTHOR_LAST_NAME', u'Abrahams'), ('AUTHOR_FIRST_NAME', u'GR'), ('AUTHOR_LAST_NAME', u'Wickham'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'scattering'), ('TITLE', u'of'), ('TITLE', u'sound'), ('TITLE', u'by'), ('TITLE', u'two'), ('TITLE', u'semi-'), ('TITLE', u'infinite'), ('TITLE', u'parallel'), ('TITLE', u'staggered'), ('TITLE', u'plates.'), ('TITLE', u'I.'), ('TITLE', u'Explicit'), ('TITLE', u'matrix'), ('TITLE', u'WienerHopf'), ('TITLE', u'factorization.'), ('JOURNAL', u'Proc.'), ('JOURNAL', u'R.'), ('JOURNAL', u'Soc.'), ('JOURNAL', u'Lond.'), ('JOURNAL', u'A'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'420'), ('YEAR', u'1988'), ('PAGE', u'131'), ('DOI', u'10.1098/rspa.1988.0121'), ('REFPLAINTEXT', u'Abrahams, I.D., Wickham, G.R.: On the scattering of sound by two semi-infinite parallel staggered plates. I. Explicit matrix Wiener\u2013Hopf factorization. Proc. R. Soc. Lond. A Math. Phys. Sci. 420, 131\u2013156 (1988)'), ('REFSTR', "{u'bibunstructured': u'Abrahams, I.D., Wickham, G.R.: On the scattering of sound by two semi-infinite parallel staggered plates. I. Explicit matrix Wiener\\u2013Hopf factorization. Proc. R. Soc. Lond. A Math. Phys. Sci. 420, 131\\u2013156 (1988)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Abrahams', u'initials': u'ID'}, {u'familyname': u'Wickham', u'initials': u'GR'}], u'occurrence': [{u'handle': u'982007', u'@type': u'AMSID'}, {u'handle': u'10.1098/rspa.1988.0121', u'@type': u'DOI'}], u'journaltitle': u'Proc. R. Soc. Lond. A Math. Phys. Sci.', u'volumeid': u'420', u'firstpage': u'131', u'lastpage': u'156', u'year': u'1988', u'articletitle': {u'#text': u'On the scattering of sound by two semi-infinite parallel staggered plates. I. Explicit matrix Wiener\\u2013Hopf factorization.', u'@outputmedium': u'All', u'@language': u'En'}}, u'citationnumber': u'1.', u'@id': u'CR1'}")],
[('AUTHOR_FIRST_NAME', u'ID'), ('AUTHOR_LAST_NAME', u'Abrahams'), ('AUTHOR_FIRST_NAME', u'GR'), ('AUTHOR_LAST_NAME', u'Wickham'), ('TITLE', u'The'), ('TITLE', u'scattering'), ('TITLE', u'of'), ('TITLE', u'sound'), ('TITLE', u'by'), ('TITLE', u'two'), ('TITLE', u'semi-'), ('TITLE', u'infinite'), ('TITLE', u'parallel'), ('TITLE', u'staggered'), ('TITLE', u'plates.'), ('TITLE', u'II.'), ('TITLE', u'Evaluation'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'velocity'), ('TITLE', u'potential'), ('TITLE', u'for'), ('TITLE', u'an'), ('TITLE', u'incident'), ('TITLE', u'plane'), ('TITLE', u'wave'), ('TITLE', u'and'), ('TITLE', u'an'), ('TITLE', u'incident'), ('TITLE', u'duct'), ('TITLE', u'mode'), ('JOURNAL', u'Proc.'), ('JOURNAL', u'R.'), ('JOURNAL', u'Soc.'), ('JOURNAL', u'Lond.'), ('JOURNAL', u'A'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'427'), ('ISSUE', u'1872'), ('YEAR', u'1990'), ('PAGE', u'139'), ('DOI', u'10.1098/rspa.1990.0006'), ('REFPLAINTEXT', u'Abrahams, I.D., Wickham, G.R.: The scattering of sound by two semi-infinite parallel staggered plates. II. Evaluation of the velocity potential for an incident plane wave and an incident duct mode. Proc. R. Soc. Lond. A Math. Phys. Sci. 427(1872), 139\u2013171 (1990)'), ('REFSTR', "{u'bibunstructured': u'Abrahams, I.D., Wickham, G.R.: The scattering of sound by two semi-infinite parallel staggered plates. II. Evaluation of the velocity potential for an incident plane wave and an incident duct mode. Proc. R. Soc. Lond. A Math. Phys. Sci. 427(1872), 139\\u2013171 (1990)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Abrahams', u'initials': u'ID'}, {u'familyname': u'Wickham', u'initials': u'GR'}], u'issueid': u'1872', u'journaltitle': u'Proc. R. Soc. Lond. A Math. Phys. Sci.', u'volumeid': u'427', u'firstpage': u'139', u'lastpage': u'171', u'year': u'1990', u'articletitle': {u'#text': u'The scattering of sound by two semi-infinite parallel staggered plates. II. 
Evaluation of the velocity potential for an incident plane wave and an incident duct mode', u'@language': u'En'}, u'occurrence': [{u'handle': u'1032983', u'@type': u'AMSID'}, {u'handle': u'10.1098/rspa.1990.0006', u'@type': u'DOI'}]}, u'citationnumber': u'2.', u'@id': u'CR2'}")],
[('AUTHOR_FIRST_NAME', u'ID'), ('AUTHOR_LAST_NAME', u'Abrahams'), ('AUTHOR_FIRST_NAME', u'GR'), ('AUTHOR_LAST_NAME', u'Wickham'), ('TITLE', u'Acoustic'), ('TITLE', u'scattering'), ('TITLE', u'by'), ('TITLE', u'two'), ('TITLE', u'parallel'), ('TITLE', u'slightly'), ('TITLE', u'staggered'), ('TITLE', u'rigid'), ('TITLE', u'plates'), ('JOURNAL', u'Wave'), ('JOURNAL', u'Motion'), ('VOLUME', u'12'), ('ISSUE', u'3'), ('YEAR', u'1990'), ('PAGE', u'281'), ('DOI', u'10.1016/0165-2125(90)90044-5'), ('REFPLAINTEXT', u'Abrahams, I.D., Wickham, G.R.: Acoustic scattering by two parallel slightly staggered rigid plates. Wave Motion 12(3), 281\u2013297 (1990)'), ('REFSTR', "{u'bibunstructured': u'Abrahams, I.D., Wickham, G.R.: Acoustic scattering by two parallel slightly staggered rigid plates. Wave Motion 12(3), 281\\u2013297 (1990)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Abrahams', u'initials': u'ID'}, {u'familyname': u'Wickham', u'initials': u'GR'}], u'issueid': u'3', u'journaltitle': u'Wave Motion', u'volumeid': u'12', u'firstpage': u'281', u'lastpage': u'297', u'year': u'1990', u'articletitle': {u'#text': u'Acoustic scattering by two parallel slightly staggered rigid plates', u'@language': u'En'}, u'occurrence': [{u'handle': u'1056278', u'@type': u'AMSID'}, {u'handle': u'10.1016/0165-2125(90)90044-5', u'@type': u'DOI'}]}, u'citationnumber': u'3.', u'@id': u'CR3'}")],
[('AUTHOR_FIRST_NAME', u'ID'), ('AUTHOR_LAST_NAME', u'Abrahams'), ('AUTHOR_FIRST_NAME', u'GR'), ('AUTHOR_LAST_NAME', u'Wickham'), ('TITLE', u'General'), ('TITLE', u'WienerHopf'), ('TITLE', u'factorization'), ('TITLE', u'of'), ('TITLE', u'matrix'), ('TITLE', u'kernels'), ('TITLE', u'with'), ('TITLE', u'exponential'), ('TITLE', u'phase'), ('TITLE', u'factors'), ('JOURNAL', u'SIAM'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'50'), ('ISSUE', u'3'), ('YEAR', u'1990'), ('PAGE', u'819'), ('DOI', u'10.1137/0150047'), ('REFPLAINTEXT', u'Abrahams, I.D., Wickham, G.R.: General Wiener\u2013Hopf factorization of matrix kernels with exponential phase factors. SIAM J. Appl. Math. 50(3), 819\u2013838 (1990)'), ('REFSTR', "{u'bibunstructured': u'Abrahams, I.D., Wickham, G.R.: General Wiener\\u2013Hopf factorization of matrix kernels with exponential phase factors. SIAM J. Appl. Math. 50(3), 819\\u2013838 (1990)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Abrahams', u'initials': u'ID'}, {u'familyname': u'Wickham', u'initials': u'GR'}], u'issueid': u'3', u'journaltitle': u'SIAM J. Appl. Math.', u'volumeid': u'50', u'firstpage': u'819', u'lastpage': u'838', u'year': u'1990', u'articletitle': {u'#text': u'General Wiener\\u2013Hopf factorization of matrix kernels with exponential phase factors', u'@language': u'En'}, u'occurrence': [{u'handle': u'1050914', u'@type': u'AMSID'}, {u'handle': u'10.1137/0150047', u'@type': u'DOI'}]}, u'citationnumber': u'4.', u'@id': u'CR4'}")],
[('REFPLAINTEXT', u'Noble, B.: Methods Based on the Wiener\u2013Hopf Technique. Pergamon Press, London (1958)'), ('REFSTR', "{u'bibunstructured': u'Noble, B.: Methods Based on the Wiener\\u2013Hopf Technique. Pergamon Press, London (1958)', u'citationnumber': u'5.', u'@id': u'CR5'}")],
[('AUTHOR_FIRST_NAME', u'IC'), ('AUTHOR_LAST_NAME', u'Gohberg'), ('AUTHOR_FIRST_NAME', u'MG'), ('AUTHOR_LAST_NAME', u'Krein'), ('TITLE', u'Systems'), ('TITLE', u'of'), ('TITLE', u'integral'), ('TITLE', u'equations'), ('TITLE', u'on'), ('TITLE', u'a'), ('TITLE', u'half'), ('TITLE', u'line'), ('TITLE', u'with'), ('TITLE', u'kernels'), ('TITLE', u'depending'), ('TITLE', u'on'), ('TITLE', u'the'), ('TITLE', u'difference'), ('TITLE', u'of'), ('TITLE', u'arguments'), ('JOURNAL', u'Am.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Soc.'), ('JOURNAL', u'Transl.'), ('JOURNAL', u'Ser.'), ('JOURNAL', u'2'), ('VOLUME', u'14'), ('YEAR', u'1960'), ('PAGE', u'217'), ('REFPLAINTEXT', u'Gohberg, I.C., Krein, M.G.: Systems of integral equations on a half line with kernels depending on the difference of arguments. Am. Math. Soc. Transl. Ser. 2 14, 217\u2013287 (1960)'), ('REFSTR', "{u'bibunstructured': u'Gohberg, I.C., Krein, M.G.: Systems of integral equations on a half line with kernels depending on the difference of arguments. Am. Math. Soc. Transl. Ser. 2 14, 217\\u2013287 (1960)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Gohberg', u'initials': u'IC'}, {u'familyname': u'Krein', u'initials': u'MG'}], u'occurrence': {u'handle': u'113114', u'@type': u'AMSID'}, u'journaltitle': u'Am. Math. Soc. Transl. Ser. 2', u'volumeid': u'14', u'firstpage': u'217', u'lastpage': u'287', u'year': u'1960', u'articletitle': {u'#text': u'Systems of integral equations on a half line with kernels depending on the difference of arguments', u'@language': u'En'}}, u'citationnumber': u'6.', u'@id': u'CR6'}")],
[('AUTHOR_FIRST_NAME', u'DS'), ('AUTHOR_LAST_NAME', u'Jones'), ('TITLE', u'Factorization'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'WienerHopf'), ('TITLE', u'matrix'), ('JOURNAL', u'IMA'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'32'), ('ISSUE', u'1\u20133'), ('YEAR', u'1984'), ('PAGE', u'211'), ('DOI', u'10.1093/imamat/32.1-3.211'), ('REFPLAINTEXT', u'Jones, D.S.: Factorization of a Wiener\u2013Hopf matrix. IMA J. Appl. Math. 32(1\u20133), 211\u2013220 (1984)'), ('REFSTR', "{u'bibunstructured': u'Jones, D.S.: Factorization of a Wiener\\u2013Hopf matrix. IMA J. Appl. Math. 32(1\\u20133), 211\\u2013220 (1984)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Jones', u'initials': u'DS'}, u'issueid': u'1\\u20133', u'journaltitle': u'IMA J. Appl. Math.', u'volumeid': u'32', u'firstpage': u'211', u'lastpage': u'220', u'year': u'1984', u'articletitle': {u'#text': u'Factorization of a Wiener\\u2013Hopf matrix', u'@language': u'En'}, u'occurrence': [{u'handle': u'740458', u'@type': u'AMSID'}, {u'handle': u'10.1093/imamat/32.1-3.211', u'@type': u'DOI'}]}, u'citationnumber': u'7.', u'@id': u'CR7'}")],
[('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Meister'), ('AUTHOR_FIRST_NAME', u'F-O'), ('AUTHOR_LAST_NAME', u'Speck'), ('YEAR', u'2012'), ('PAGE', u'385'), ('PUBLISHER', u'The'), ('PUBLISHER', u'Gohberg'), ('PUBLISHER', u'Anniversary'), ('PUBLISHER', u'Collection.'), ('PUBLISHER', u'Operator'), ('PUBLISHER', u'Theory:'), ('PUBLISHER', u'Advances'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Applications'), ('REFPLAINTEXT', u'Meister, E., Speck, F.-O.: Wiener\u2013Hopf factorization of certain non-rational matrix functions in mathematical physics. In: Dym, H., Goldberg, S., Kaashoek, M.A., Lancaster, P. (eds.) The Gohberg Anniversary Collection. Operator Theory: Advances and Applications, vol. 41, pp. 385\u2013394. Birkhauser, Basel (2012)'), ('REFSTR', "{u'bibunstructured': u'Meister, E., Speck, F.-O.: Wiener\\u2013Hopf factorization of certain non-rational matrix functions in mathematical physics. In: Dym, H., Goldberg, S., Kaashoek, M.A., Lancaster, P. (eds.) The Gohberg Anniversary Collection. Operator Theory: Advances and Applications, vol. 41, pp. 385\\u2013394. Birkhauser, Basel (2012)', u'bibchapter': {u'eds': {u'publisherlocation': u'Basel', u'booktitle': u'The Gohberg Anniversary Collection. Operator Theory: Advances and Applications', u'firstpage': u'385', u'lastpage': u'394', u'numberinseries': u'41', u'publishername': u'Birkhauser'}, u'bibauthorname': [{u'familyname': u'Meister', u'initials': u'E'}, {u'familyname': u'Speck', u'initials': u'F-O'}], u'chaptertitle': {u'#text': u'Wiener\\u2013Hopf factorization of certain non-rational matrix functions in mathematical physics', u'@language': u'En'}, u'bibeditorname': [{u'familyname': u'Dym', u'initials': u'H'}, {u'familyname': u'Goldberg', u'initials': u'S'}, {u'familyname': u'Kaashoek', u'initials': u'MA'}, {u'familyname': u'Lancaster', u'initials': u'P'}], u'year': u'2012'}, u'citationnumber': u'8.', u'@id': u'CR8'}")],
[('AUTHOR_FIRST_NAME', u'AE'), ('AUTHOR_LAST_NAME', u'Heins'), ('TITLE', u'The'), ('TITLE', u'scope'), ('TITLE', u'and'), ('TITLE', u'limitations'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'method'), ('TITLE', u'of'), ('TITLE', u'Wiener'), ('TITLE', u'and'), ('TITLE', u'Hopf'), ('JOURNAL', u'Commun.'), ('JOURNAL', u'Pure'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'IX'), ('YEAR', u'1956'), ('PAGE', u'447'), ('DOI', u'10.1002/cpa.3160090316'), ('REFPLAINTEXT', u'Heins, A.E.: The scope and limitations of the method of Wiener and Hopf. Commun. Pure Appl. Math. IX, 447\u2013466 (1956)'), ('REFSTR', "{u'bibunstructured': u'Heins, A.E.: The scope and limitations of the method of Wiener and Hopf. Commun. Pure Appl. Math. IX, 447\\u2013466 (1956)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Heins', u'initials': u'AE'}, u'occurrence': [{u'handle': u'81977', u'@type': u'AMSID'}, {u'handle': u'10.1002/cpa.3160090316', u'@type': u'DOI'}], u'journaltitle': u'Commun. Pure Appl. Math.', u'volumeid': u'IX', u'firstpage': u'447', u'lastpage': u'466', u'year': u'1956', u'articletitle': {u'#text': u'The scope and limitations of the method of Wiener and Hopf', u'@language': u'En'}}, u'citationnumber': u'9.', u'@id': u'CR9'}")],
[('AUTHOR_FIRST_NAME', u'I'), ('AUTHOR_LAST_NAME', u'Gohberg'), ('AUTHOR_FIRST_NAME', u'MA'), ('AUTHOR_LAST_NAME', u'Kaashoek'), ('AUTHOR_FIRST_NAME', u'IM'), ('AUTHOR_LAST_NAME', u'Spitkovsky'), ('YEAR', u'2000'), ('PAGE', u'1'), ('PUBLISHER', u'Factorization'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Integrable'), ('PUBLISHER', u'Systems'), ('REFPLAINTEXT', u'Gohberg, I., Kaashoek, M.A., Spitkovsky, I.M.: An overview of matrix factorization theory and operator applications. In: Gohberg, I., Manojlovic, N., dos Santos, A.F. (eds.) Factorization and Integrable Systems, pp. 1\u2013102. Birkh\xe4user, Basel (2000)'), ('REFSTR', "{u'bibunstructured': u'Gohberg, I., Kaashoek, M.A., Spitkovsky, I.M.: An overview of matrix factorization theory and operator applications. In: Gohberg, I., Manojlovic, N., dos Santos, A.F. (eds.) Factorization and Integrable Systems, pp. 1\\u2013102. Birkh\\xe4user, Basel (2000)', u'bibchapter': {u'eds': {u'publisherlocation': u'Basel', u'booktitle': u'Factorization and Integrable Systems', u'publishername': u'Birkh\\xe4user', u'firstpage': u'1', u'lastpage': u'102'}, u'bibauthorname': [{u'familyname': u'Gohberg', u'initials': u'I'}, {u'familyname': u'Kaashoek', u'initials': u'MA'}, {u'familyname': u'Spitkovsky', u'initials': u'IM'}], u'chaptertitle': {u'#text': u'An overview of matrix factorization theory and operator applications', u'@language': u'En'}, u'bibeditorname': [{u'familyname': u'Gohberg', u'initials': u'I'}, {u'familyname': u'Manojlovic', u'initials': u'N'}, {u'familyname': u'Santos', u'particle': u'dos', u'initials': u'AF'}], u'year': u'2000'}, u'citationnumber': u'10.', u'@id': u'CR10'}")],
[('AUTHOR_FIRST_NAME', u'AV'), ('AUTHOR_LAST_NAME', u'Kisil'), ('TITLE', u'An'), ('TITLE', u'iterative'), ('TITLE', u'WienerHopf'), ('TITLE', u'method'), ('TITLE', u'for'), ('TITLE', u'triangular'), ('TITLE', u'matrix'), ('TITLE', u'functions'), ('TITLE', u'with'), ('TITLE', u'exponential'), ('TITLE', u'factors'), ('JOURNAL', u'SIAM'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'78'), ('ISSUE', u'1'), ('YEAR', u'2018'), ('PAGE', u'45'), ('DOI', u'10.1137/17M1136304'), ('REFPLAINTEXT', u'Kisil, A.V.: An iterative Wiener\u2013Hopf method for triangular matrix functions with exponential factors. SIAM J. Appl. Math. 78(1), 45\u201362 (2018)'), ('REFSTR', "{u'bibunstructured': u'Kisil, A.V.: An iterative Wiener\\u2013Hopf method for triangular matrix functions with exponential factors. SIAM J. Appl. Math. 78(1), 45\\u201362 (2018)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Kisil', u'initials': u'AV'}, u'issueid': u'1', u'journaltitle': u'SIAM J. Appl. Math.', u'volumeid': u'78', u'firstpage': u'45', u'lastpage': u'62', u'year': u'2018', u'articletitle': {u'#text': u'An iterative Wiener\\u2013Hopf method for triangular matrix functions with exponential factors', u'@language': u'En'}, u'occurrence': [{u'handle': u'3742700', u'@type': u'AMSID'}, {u'handle': u'10.1137/17M1136304', u'@type': u'DOI'}]}, u'citationnumber': u'11.', u'@id': u'CR11'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Mishuris'), ('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Rogosin'), ('TITLE', u'Factorization'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'class'), ('TITLE', u'of'), ('TITLE', u'matrix-'), ('TITLE', u'functions'), ('TITLE', u'with'), ('TITLE', u'stable'), ('TITLE', u'partial'), ('TITLE', u'indices'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Methods'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'39'), ('ISSUE', u'13'), ('YEAR', u'2016'), ('PAGE', u'3791'), ('DOI', u'10.1002/mma.3825'), ('REFPLAINTEXT', u'Mishuris, G., Rogosin, S.: Factorization of a class of matrix-functions with stable partial indices. Math. Methods Appl. Sci. 39(13), 3791\u20133807 (2016)'), ('REFSTR', "{u'bibunstructured': u'Mishuris, G., Rogosin, S.: Factorization of a class of matrix-functions with stable partial indices. Math. Methods Appl. Sci. 39(13), 3791\\u20133807 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Mishuris', u'initials': u'G'}, {u'familyname': u'Rogosin', u'initials': u'S'}], u'issueid': u'13', u'journaltitle': u'Math. Methods Appl. Sci.', u'volumeid': u'39', u'firstpage': u'3791', u'lastpage': u'3807', u'year': u'2016', u'articletitle': {u'#text': u'Factorization of a class of matrix-functions with stable partial indices', u'@language': u'En'}, u'occurrence': [{u'handle': u'3529384', u'@type': u'AMSID'}, {u'handle': u'10.1002/mma.3825', u'@type': u'DOI'}]}, u'citationnumber': u'12.', u'@id': u'CR12'}")],
[('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Rogosin'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Mishuris'), ('TITLE', u'Constructive'), ('TITLE', u'methods'), ('TITLE', u'for'), ('TITLE', u'factorization'), ('TITLE', u'of'), ('TITLE', u'matrix-'), ('TITLE', u'functions'), ('JOURNAL', u'IMA'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'81'), ('ISSUE', u'2'), ('YEAR', u'2015'), ('PAGE', u'365'), ('DOI', u'10.1093/imamat/hxv038'), ('REFPLAINTEXT', u'Rogosin, S., Mishuris, G.: Constructive methods for factorization of matrix-functions. IMA J. Appl. Math. 81(2), 365\u2013391 (2015)'), ('REFSTR', "{u'bibunstructured': u'Rogosin, S., Mishuris, G.: Constructive methods for factorization of matrix-functions. IMA J. Appl. Math. 81(2), 365\\u2013391 (2015)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Rogosin', u'initials': u'S'}, {u'familyname': u'Mishuris', u'initials': u'G'}], u'issueid': u'2', u'journaltitle': u'IMA J. Appl. Math.', u'volumeid': u'81', u'firstpage': u'365', u'lastpage': u'391', u'year': u'2015', u'articletitle': {u'#text': u'Constructive methods for factorization of matrix-functions', u'@language': u'En'}, u'occurrence': [{u'handle': u'3483088', u'@type': u'AMSID'}, {u'handle': u'10.1093/imamat/hxv038', u'@type': u'DOI'}]}, u'citationnumber': u'13.', u'@id': u'CR13'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Mishuris'), ('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Rogosin'), ('TITLE', u'Regular'), ('TITLE', u'approximate'), ('TITLE', u'factorization'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'class'), ('TITLE', u'of'), ('TITLE', u'matrix-'), ('TITLE', u'function'), ('TITLE', u'with'), ('TITLE', u'an'), ('TITLE', u'unstable'), ('TITLE', u'set'), ('TITLE', u'of'), ('TITLE', u'partial'), ('TITLE', u'indices'), ('JOURNAL', u'Proc.'), ('JOURNAL', u'R.'), ('JOURNAL', u'Soc.'), ('JOURNAL', u'A'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Eng.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'474'), ('ISSUE', u'2209'), ('YEAR', u'2018'), ('PAGE', u'20170279'), ('DOI', u'10.1098/rspa.2017.0279'), ('REFPLAINTEXT', u'Mishuris, G., Rogosin, S.: Regular approximate factorization of a class of matrix-function with an unstable set of partial indices. Proc. R. Soc. A Math. Phys. Eng. Sci. 474(2209), 20170279 (2018)'), ('REFSTR', "{u'bibunstructured': u'Mishuris, G., Rogosin, S.: Regular approximate factorization of a class of matrix-function with an unstable set of partial indices. Proc. R. Soc. A Math. Phys. Eng. Sci. 474(2209), 20170279 (2018)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Mishuris', u'initials': u'G'}, {u'familyname': u'Rogosin', u'initials': u'S'}], u'issueid': u'2209', u'journaltitle': u'Proc. R. Soc. A Math. Phys. Eng. Sci.', u'volumeid': u'474', u'firstpage': u'20170279', u'year': u'2018', u'articletitle': {u'#text': u'Regular approximate factorization of a class of matrix-function with an unstable set of partial indices', u'@language': u'En'}, u'occurrence': [{u'handle': u'3762905', u'@type': u'AMSID'}, {u'handle': u'10.1098/rspa.2017.0279', u'@type': u'DOI'}]}, u'citationnumber': u'14.', u'@id': u'CR14'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Mishuris'), ('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Rogosin'), ('TITLE', u'An'), ('TITLE', u'asymptotic'), ('TITLE', u'method'), ('TITLE', u'of'), ('TITLE', u'factorization'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'class'), ('TITLE', u'of'), ('TITLE', u'matrix'), ('TITLE', u'functions'), ('JOURNAL', u'Proc.'), ('JOURNAL', u'R.'), ('JOURNAL', u'Soc.'), ('JOURNAL', u'A'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Eng.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'470'), ('YEAR', u'2014'), ('PAGE', u'20140109'), ('REFPLAINTEXT', u'Mishuris, G., Rogosin, S.: An asymptotic method of factorization of a class of matrix functions. Proc. R. Soc. A Math. Phys. Eng. Sci. 470, 20140109 (2014)'), ('REFSTR', "{u'bibunstructured': u'Mishuris, G., Rogosin, S.: An asymptotic method of factorization of a class of matrix functions. Proc. R. Soc. A Math. Phys. Eng. Sci. 470, 20140109 (2014)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Mishuris', u'initials': u'G'}, {u'familyname': u'Rogosin', u'initials': u'S'}], u'occurrence': {u'handle': u'10.1098/rspa.2014.0109', u'@type': u'DOI'}, u'journaltitle': u'Proc. R. Soc. A Math. Phys. Eng. Sci.', u'volumeid': u'470', u'firstpage': u'20140109', u'year': u'2014', u'articletitle': {u'#text': u'An asymptotic method of factorization of a class of matrix functions', u'@language': u'En'}}, u'citationnumber': u'15.', u'@id': u'CR15'}")],
[('AUTHOR_FIRST_NAME', u'JD'), ('AUTHOR_LAST_NAME', u'Achenbach'), ('YEAR', u'2012'), ('PUBLISHER', u'Wave'), ('PUBLISHER', u'Propagation'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Elastic'), ('PUBLISHER', u'Solids.'), ('PUBLISHER', u'North-'), ('PUBLISHER', u'Holland'), ('PUBLISHER', u'Series'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Applied'), ('PUBLISHER', u'Mathematics'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Mechanics'), ('VOLUME', u'1'), ('REFPLAINTEXT', u'Achenbach, J.D.: Wave Propagation in Elastic Solids. North-Holland Series in Applied Mathematics and Mechanics, vol. 16, 1st edn. North-Holland Publishing Co., Amsterdam (2012)'), ('REFSTR', "{u'bibunstructured': u'Achenbach, J.D.: Wave Propagation in Elastic Solids. North-Holland Series in Applied Mathematics and Mechanics, vol. 16, 1st edn. North-Holland Publishing Co., Amsterdam (2012)', u'citationnumber': u'16.', u'@id': u'CR16', u'bibbook': {u'bibauthorname': {u'familyname': u'Achenbach', u'initials': u'JD'}, u'publishername': u'North-Holland Publishing Co.', u'booktitle': u'Wave Propagation in Elastic Solids. North-Holland Series in Applied Mathematics and Mechanics', u'year': u'2012', u'numberinseries': u'16', u'editionnumber': u'1', u'publisherlocation': u'Amsterdam'}}")],
[('AUTHOR_FIRST_NAME', u'J'), ('AUTHOR_LAST_NAME', u'Miklowitz'), ('YEAR', u'2012'), ('PUBLISHER', u'The'), ('PUBLISHER', u'Theory'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Elastic'), ('PUBLISHER', u'Waves'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Waveguides.'), ('PUBLISHER', u'North-'), ('PUBLISHER', u'Holland'), ('PUBLISHER', u'Series'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Applied'), ('PUBLISHER', u'Mathematics'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Mechanics'), ('REFPLAINTEXT', u'Miklowitz, J.: The Theory of Elastic Waves and Waveguides. North-Holland Series in Applied Mathematics and Mechanics, vol. 22. North-Holland Publishing Co., Amsterdam (2012)'), ('REFSTR', "{u'bibunstructured': u'Miklowitz, J.: The Theory of Elastic Waves and Waveguides. North-Holland Series in Applied Mathematics and Mechanics, vol. 22. North-Holland Publishing Co., Amsterdam (2012)', u'citationnumber': u'17.', u'@id': u'CR17', u'bibbook': {u'bibauthorname': {u'familyname': u'Miklowitz', u'initials': u'J'}, u'publisherlocation': u'Amsterdam', u'booktitle': u'The Theory of Elastic Waves and Waveguides. North-Holland Series in Applied Mathematics and Mechanics', u'year': u'2012', u'numberinseries': u'22', u'publishername': u'North-Holland Publishing Co.'}}")],
[('AUTHOR_FIRST_NAME', u'I.David'), ('AUTHOR_LAST_NAME', u'Abrahams'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'application'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'WienerHopf'), ('TITLE', u'technique'), ('TITLE', u'to'), ('TITLE', u'problems'), ('TITLE', u'in'), ('TITLE', u'dynamic'), ('TITLE', u'elasticity'), ('JOURNAL', u'Wave'), ('JOURNAL', u'Motion'), ('VOLUME', u'36'), ('ISSUE', u'4'), ('YEAR', u'2002'), ('PAGE', u'311'), ('DOI', u'10.1016/S0165-2125(02)00027-6'), ('REFPLAINTEXT', u'Abrahams, I.D.: On the application of the Wiener\u2013Hopf technique to problems in dynamic elasticity. Wave Motion 36(4), 311\u2013333 (2002)'), ('REFSTR', "{u'bibunstructured': u'Abrahams, I.D.: On the application of the Wiener\\u2013Hopf technique to problems in dynamic elasticity. Wave Motion 36(4), 311\\u2013333 (2002)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Abrahams', u'initials': u'I.David'}, u'issueid': u'4', u'journaltitle': u'Wave Motion', u'volumeid': u'36', u'firstpage': u'311', u'lastpage': u'333', u'year': u'2002', u'articletitle': {u'#text': u'On the application of the Wiener\\u2013Hopf technique to problems in dynamic elasticity', u'@language': u'En'}, u'occurrence': [{u'handle': u'1950990', u'@type': u'AMSID'}, {u'handle': u'10.1016/S0165-2125(02)00027-6', u'@type': u'DOI'}]}, u'citationnumber': u'18.', u'@id': u'CR18'}")],
[('AUTHOR_FIRST_NAME', u'BL'), ('AUTHOR_LAST_NAME', u'Sharma'), ('TITLE', u'Diffraction'), ('TITLE', u'of'), ('TITLE', u'waves'), ('TITLE', u'on'), ('TITLE', u'square'), ('TITLE', u'lattice'), ('TITLE', u'by'), ('TITLE', u'semi-'), ('TITLE', u'infinite'), ('TITLE', u'crack'), ('JOURNAL', u'SIAM'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'75'), ('ISSUE', u'3'), ('YEAR', u'2015'), ('PAGE', u'1171'), ('DOI', u'10.1137/140985093'), ('REFPLAINTEXT', u'Sharma, B.L.: Diffraction of waves on square lattice by semi-infinite crack. SIAM J. Appl. Math. 75(3), 1171\u20131192 (2015)'), ('REFSTR', "{u'bibunstructured': u'Sharma, B.L.: Diffraction of waves on square lattice by semi-infinite crack. SIAM J. Appl. Math. 75(3), 1171\\u20131192 (2015)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Sharma', u'initials': u'BL'}, u'issueid': u'3', u'journaltitle': u'SIAM J. Appl. Math.', u'volumeid': u'75', u'firstpage': u'1171', u'lastpage': u'1192', u'year': u'2015', u'articletitle': {u'#text': u'Diffraction of waves on square lattice by semi-infinite crack', u'@language': u'En'}, u'occurrence': [{u'handle': u'3355779', u'@type': u'AMSID'}, {u'handle': u'10.1137/140985093', u'@type': u'DOI'}]}, u'citationnumber': u'19.', u'@id': u'CR19'}")],
[('AUTHOR_FIRST_NAME', u'BL'), ('AUTHOR_LAST_NAME', u'Sharma'), ('TITLE', u'Near-'), ('TITLE', u'tip'), ('TITLE', u'field'), ('TITLE', u'for'), ('TITLE', u'diffraction'), ('TITLE', u'on'), ('TITLE', u'square'), ('TITLE', u'lattice'), ('TITLE', u'by'), ('TITLE', u'crack'), ('JOURNAL', u'SIAM'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'75'), ('ISSUE', u'4'), ('YEAR', u'2015'), ('PAGE', u'1915'), ('DOI', u'10.1137/15M1010646'), ('REFPLAINTEXT', u'Sharma, B.L.: Near-tip field for diffraction on square lattice by crack. SIAM J. Appl. Math. 75(4), 1915\u20131940 (2015)'), ('REFSTR', "{u'bibunstructured': u'Sharma, B.L.: Near-tip field for diffraction on square lattice by crack. SIAM J. Appl. Math. 75(4), 1915\\u20131940 (2015)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Sharma', u'initials': u'BL'}, u'issueid': u'4', u'journaltitle': u'SIAM J. Appl. Math.', u'volumeid': u'75', u'firstpage': u'1915', u'lastpage': u'1940', u'year': u'2015', u'articletitle': {u'#text': u'Near-tip field for diffraction on square lattice by crack', u'@language': u'En'}, u'occurrence': [{u'handle': u'3390158', u'@type': u'AMSID'}, {u'handle': u'10.1137/15M1010646', u'@type': u'DOI'}]}, u'citationnumber': u'20.', u'@id': u'CR20'}")],
[('AUTHOR_FIRST_NAME', u'LI'), ('AUTHOR_LAST_NAME', u'Slepyan'), ('YEAR', u'2002'), ('PUBLISHER', u'Models'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Phenomena'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Fracture'), ('PUBLISHER', u'Mechanics'), ('REFPLAINTEXT', u'Slepyan, L.I.: Models and Phenomena in Fracture Mechanics. Springer, Berlin (2002)'), ('REFSTR', "{u'bibunstructured': u'Slepyan, L.I.: Models and Phenomena in Fracture Mechanics. Springer, Berlin (2002)', u'citationnumber': u'21.', u'@id': u'CR21', u'bibbook': {u'bibauthorname': {u'familyname': u'Slepyan', u'initials': u'LI'}, u'publisherlocation': u'Berlin', u'occurrence': {u'handle': u'10.1007/978-3-540-48010-5', u'@type': u'DOI'}, u'booktitle': u'Models and Phenomena in Fracture Mechanics', u'year': u'2002', u'publishername': u'Springer'}}")],
[('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Meister'), ('AUTHOR_FIRST_NAME', u'K'), ('AUTHOR_LAST_NAME', u'Rottbrand'), ('TITLE', u'Elastodynamical'), ('TITLE', u'scattering'), ('TITLE', u'by'), ('TITLE', u'N'), ('TITLE', u'parallel'), ('TITLE', u'half-'), ('TITLE', u'planes'), ('TITLE', u'in'), ('TITLE', u'{'), ('TITLE', u'R}^3'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Nachrichten'), ('VOLUME', u'177'), ('YEAR', u'1996'), ('PAGE', u'189'), ('DOI', u'10.1002/mana.19961770112'), ('REFPLAINTEXT', u'Meister, E., Rottbrand, K.: Elastodynamical scattering by N parallel half-planes in { R}^3. Math. Nachrichten 177, 189\u2013232 (1996)'), ('REFSTR', "{u'bibunstructured': u'Meister, E., Rottbrand, K.: Elastodynamical scattering by N parallel half-planes in { R}^3. Math. Nachrichten 177, 189\\u2013232 (1996)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Meister', u'initials': u'E'}, {u'familyname': u'Rottbrand', u'initials': u'K'}], u'occurrence': [{u'handle': u'1374950', u'@type': u'AMSID'}, {u'handle': u'10.1002/mana.19961770112', u'@type': u'DOI'}], u'journaltitle': u'Math. Nachrichten', u'volumeid': u'177', u'firstpage': u'189', u'lastpage': u'232', u'year': u'1996', u'articletitle': {u'#text': u'Elastodynamical scattering by N parallel half-planes in { R}^3', u'@language': u'En'}}, u'citationnumber': u'22.', u'@id': u'CR22'}")],
[('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Meister'), ('AUTHOR_FIRST_NAME', u'K'), ('AUTHOR_LAST_NAME', u'Rottbrand'), ('TITLE', u'Elastodynamical'), ('TITLE', u'scattering'), ('TITLE', u'by'), ('TITLE', u'N'), ('TITLE', u'parallel'), ('TITLE', u'half-'), ('TITLE', u'planes'), ('TITLE', u'in'), ('TITLE', u'{'), ('TITLE', u'R}^3'), ('TITLE', u'II'), ('TITLE', u'Explicit'), ('TITLE', u'solutions'), ('TITLE', u'for'), ('TITLE', u'N=2'), ('TITLE', u'by'), ('TITLE', u'explicit'), ('TITLE', u'symbol'), ('TITLE', u'factorization'), ('JOURNAL', u'Integral'), ('JOURNAL', u'Equ.'), ('JOURNAL', u'Oper.'), ('JOURNAL', u'Theory'), ('VOLUME', u'29'), ('ISSUE', u'1'), ('YEAR', u'1997'), ('PAGE', u'70'), ('REFPLAINTEXT', u'Meister, E., Rottbrand, K.: Elastodynamical scattering by N parallel half-planes in { R}^3 II Explicit solutions for N=2 by explicit symbol factorization. Integral Equ. Oper. Theory 29(1), 70\u2013109 (1997)'), ('REFSTR', "{u'bibunstructured': u'Meister, E., Rottbrand, K.: Elastodynamical scattering by N parallel half-planes in { R}^3 II Explicit solutions for N=2 by explicit symbol factorization. Integral Equ. Oper. Theory 29(1), 70\\u2013109 (1997)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Meister', u'initials': u'E'}, {u'familyname': u'Rottbrand', u'initials': u'K'}], u'issueid': u'1', u'journaltitle': u'Integral Equ. Oper. Theory', u'volumeid': u'29', u'firstpage': u'70', u'lastpage': u'109', u'year': u'1997', u'articletitle': {u'#text': u'Elastodynamical scattering by N parallel half-planes in { R}^3 II Explicit solutions for N=2 by explicit symbol factorization', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1007/BF01191481', u'@type': u'DOI'}}, u'citationnumber': u'23.', u'@id': u'CR23'}")],
[('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Meister'), ('AUTHOR_FIRST_NAME', u'K'), ('AUTHOR_LAST_NAME', u'Rottbrand'), ('AUTHOR_FIRST_NAME', u'F-O'), ('AUTHOR_LAST_NAME', u'Speck'), ('TITLE', u'WienerHopf'), ('TITLE', u'equations'), ('TITLE', u'for'), ('TITLE', u'waves'), ('TITLE', u'scattered'), ('TITLE', u'by'), ('TITLE', u'a'), ('TITLE', u'system'), ('TITLE', u'of'), ('TITLE', u'parallel'), ('TITLE', u'Sommerfeld'), ('TITLE', u'half-'), ('TITLE', u'planes'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Methods'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'14'), ('ISSUE', u'8'), ('YEAR', u'1991'), ('PAGE', u'525'), ('DOI', u'10.1002/mma.1670140802'), ('REFPLAINTEXT', u'Meister, E., Rottbrand, K., Speck, F.-O.: Wiener\u2013Hopf equations for waves scattered by a system of parallel Sommerfeld half-planes. Math. Methods Appl. Sci. 14(8), 525\u2013552 (1991)'), ('REFSTR', "{u'bibunstructured': u'Meister, E., Rottbrand, K., Speck, F.-O.: Wiener\\u2013Hopf equations for waves scattered by a system of parallel Sommerfeld half-planes. Math. Methods Appl. Sci. 14(8), 525\\u2013552 (1991)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Meister', u'initials': u'E'}, {u'familyname': u'Rottbrand', u'initials': u'K'}, {u'familyname': u'Speck', u'initials': u'F-O'}], u'issueid': u'8', u'journaltitle': u'Math. Methods Appl. Sci.', u'volumeid': u'14', u'firstpage': u'525', u'lastpage': u'552', u'year': u'1991', u'articletitle': {u'#text': u'Wiener\\u2013Hopf equations for waves scattered by a system of parallel Sommerfeld half-planes', u'@language': u'En'}, u'occurrence': [{u'handle': u'1129187', u'@type': u'AMSID'}, {u'handle': u'10.1002/mma.1670140802', u'@type': u'DOI'}]}, u'citationnumber': u'24.', u'@id': u'CR24'}")],
[('AUTHOR_FIRST_NAME', u'EI'), ('AUTHOR_LAST_NAME', u'Jury'), ('YEAR', u'1964'), ('PUBLISHER', u'Theory'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Application'), ('PUBLISHER', u'of'), ('PUBLISHER', u'the'), ('PUBLISHER', u'z-'), ('PUBLISHER', u'Transform'), ('PUBLISHER', u'Method'), ('REFPLAINTEXT', u'Jury, E.I.: Theory and Application of the z-Transform Method. Wiley, New York (1964)'), ('REFSTR', "{u'bibunstructured': u'Jury, E.I.: Theory and Application of the z-Transform Method. Wiley, New York (1964)', u'citationnumber': u'25.', u'@id': u'CR25', u'bibbook': {u'publisherlocation': u'New York', u'bibauthorname': {u'familyname': u'Jury', u'initials': u'EI'}, u'publishername': u'Wiley', u'booktitle': u'Theory and Application of the z-Transform Method', u'year': u'1964'}}")],
[('AUTHOR_FIRST_NAME', u'VG'), ('AUTHOR_LAST_NAME', u'Daniele'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'solution'), ('TITLE', u'of'), ('TITLE', u'two'), ('TITLE', u'coupled'), ('TITLE', u'WienerHopf'), ('TITLE', u'equations'), ('JOURNAL', u'SIAM'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'44'), ('ISSUE', u'4'), ('YEAR', u'1984'), ('PAGE', u'667'), ('DOI', u'10.1137/0144048'), ('REFPLAINTEXT', u'Daniele, V.G.: On the solution of two coupled Wiener\u2013Hopf equations. SIAM J. Appl. Math. 44(4), 667\u2013680 (1984)'), ('REFSTR', "{u'bibunstructured': u'Daniele, V.G.: On the solution of two coupled Wiener\\u2013Hopf equations. SIAM J. Appl. Math. 44(4), 667\\u2013680 (1984)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Daniele', u'initials': u'VG'}, u'issueid': u'4', u'journaltitle': u'SIAM J. Appl. Math.', u'volumeid': u'44', u'firstpage': u'667', u'lastpage': u'680', u'year': u'1984', u'articletitle': {u'#text': u'On the solution of two coupled Wiener\\u2013Hopf equations', u'@language': u'En'}, u'occurrence': [{u'handle': u'750942', u'@type': u'AMSID'}, {u'handle': u'10.1137/0144048', u'@type': u'DOI'}]}, u'citationnumber': u'26.', u'@id': u'CR26'}")],
[('REFPLAINTEXT', u'Maurya, G.: On some problems involving multiple scattering due to edges, PhD Dissertation, Indian Institute of Technology Kanpur (2018)'), ('REFSTR', "{u'bibunstructured': u'Maurya, G.: On some problems involving multiple scattering due to edges, PhD Dissertation, Indian Institute of Technology Kanpur (2018)', u'citationnumber': u'27.', u'@id': u'CR27'}")],
[('REFPLAINTEXT', u'Sharma, B.L., Maurya, G.: Discrete scattering by a pair of parallel defects. Philos. Trans. R. Soc. A: Math. Phys. Eng. Sci. (2019).'), ('REFSTR', "{u'bibunstructured': {u'#text': u'Sharma, B.L., Maurya, G.: Discrete scattering by a pair of parallel defects. Philos. Trans. R. Soc. A: Math. Phys. Eng. Sci. (2019).', u'externalref': {u'refsource': u'https://doi.org/10.1098/rsta.2019.0102', u'reftarget': {u'@address': u'10.1098/rsta.2019.0102', u'@targettype': u'DOI'}}}, u'citationnumber': u'28.', u'@id': u'CR28'}")],
[('AUTHOR_FIRST_NAME', u'AE'), ('AUTHOR_LAST_NAME', u'Heins'), ('TITLE', u'The'), ('TITLE', u'radiation'), ('TITLE', u'and'), ('TITLE', u'transmission'), ('TITLE', u'properties'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'pair'), ('TITLE', u'of'), ('TITLE', u'semi-'), ('TITLE', u'infinite'), ('TITLE', u'parallel'), ('TITLE', u'plates.'), ('TITLE', u'I'), ('JOURNAL', u'Q.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'6'), ('YEAR', u'1948'), ('PAGE', u'157'), ('DOI', u'10.1090/qam/25981'), ('REFPLAINTEXT', u'Heins, A.E.: The radiation and transmission properties of a pair of semi-infinite parallel plates. I. Q. Appl. Math. 6, 157\u2013166 (1948)'), ('REFSTR', "{u'bibunstructured': u'Heins, A.E.: The radiation and transmission properties of a pair of semi-infinite parallel plates. I. Q. Appl. Math. 6, 157\\u2013166 (1948)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Heins', u'initials': u'AE'}, u'occurrence': [{u'handle': u'25981', u'@type': u'AMSID'}, {u'handle': u'10.1090/qam/25981', u'@type': u'DOI'}], u'journaltitle': u'Q. Appl. Math.', u'volumeid': u'6', u'firstpage': u'157', u'lastpage': u'166', u'year': u'1948', u'articletitle': {u'#text': u'The radiation and transmission properties of a pair of semi-infinite parallel plates. I', u'@language': u'En'}}, u'citationnumber': u'29.', u'@id': u'CR29'}")],
[('AUTHOR_FIRST_NAME', u'AE'), ('AUTHOR_LAST_NAME', u'Heins'), ('TITLE', u'The'), ('TITLE', u'radiation'), ('TITLE', u'and'), ('TITLE', u'transmission'), ('TITLE', u'properties'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'pair'), ('TITLE', u'of'), ('TITLE', u'semi-'), ('TITLE', u'infinite'), ('TITLE', u'parallel'), ('TITLE', u'plates.'), ('TITLE', u'II'), ('JOURNAL', u'Q.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'6'), ('YEAR', u'1948'), ('PAGE', u'215'), ('REFPLAINTEXT', u'Heins, A.E.: The radiation and transmission properties of a pair of semi-infinite parallel plates. II. Q. Appl. Math. 6, 215\u2013220 (1948)'), ('REFSTR', "{u'bibunstructured': u'Heins, A.E.: The radiation and transmission properties of a pair of semi-infinite parallel plates. II. Q. Appl. Math. 6, 215\\u2013220 (1948)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Heins', u'initials': u'AE'}, u'occurrence': {u'handle': u'10.1090/qam/26922', u'@type': u'DOI'}, u'journaltitle': u'Q. Appl. Math.', u'volumeid': u'6', u'firstpage': u'215', u'lastpage': u'220', u'year': u'1948', u'articletitle': {u'#text': u'The radiation and transmission properties of a pair of semi-infinite parallel plates. II', u'@language': u'En'}}, u'citationnumber': u'30.', u'@id': u'CR30'}")],
[('AUTHOR_FIRST_NAME', u'MJ'), ('AUTHOR_LAST_NAME', u'Ablowitz'), ('AUTHOR_FIRST_NAME', u'AS'), ('AUTHOR_LAST_NAME', u'Fokas'), ('YEAR', u'2003'), ('PUBLISHER', u'Complex'), ('PUBLISHER', u'Variables:'), ('PUBLISHER', u'Introduction'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Applications.'), ('PUBLISHER', u'Cambridge'), ('PUBLISHER', u'Texts'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Applied'), ('PUBLISHER', u'Mathematics'), ('VOLUME', u'2'), ('REFPLAINTEXT', u'Ablowitz, M.J., Fokas, A.S.: Complex Variables: Introduction and Applications. Cambridge Texts in Applied Mathematics, 2nd edn. Cambridge University Press, Cambridge (2003)'), ('REFSTR', "{u'bibunstructured': u'Ablowitz, M.J., Fokas, A.S.: Complex Variables: Introduction and Applications. Cambridge Texts in Applied Mathematics, 2nd edn. Cambridge University Press, Cambridge (2003)', u'citationnumber': u'31.', u'@id': u'CR31', u'bibbook': {u'bibauthorname': [{u'familyname': u'Ablowitz', u'initials': u'MJ'}, {u'familyname': u'Fokas', u'initials': u'AS'}], u'publisherlocation': u'Cambridge', u'occurrence': {u'handle': u'10.1017/CBO9780511791246', u'@type': u'DOI'}, u'booktitle': u'Complex Variables: Introduction and Applications. Cambridge Texts in Applied Mathematics', u'year': u'2003', u'editionnumber': u'2', u'publishername': u'Cambridge University Press'}}")],
[('AUTHOR_FIRST_NAME', u'LB'), ('AUTHOR_LAST_NAME', u'Felsen'), ('AUTHOR_FIRST_NAME', u'N'), ('AUTHOR_LAST_NAME', u'Marcuvitz'), ('YEAR', u'1973'), ('PUBLISHER', u'Radiation'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Scattering'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Waves.'), ('PUBLISHER', u'Microwaves'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Fields'), ('PUBLISHER', u'Series'), ('REFPLAINTEXT', u'Felsen, L.B., Marcuvitz, N.: Radiation and Scattering of Waves. Microwaves and Fields Series. Prentice-Hall, Inc., Englewood Cliffs (1973)'), ('REFSTR', "{u'bibunstructured': u'Felsen, L.B., Marcuvitz, N.: Radiation and Scattering of Waves. Microwaves and Fields Series. Prentice-Hall, Inc., Englewood Cliffs (1973)', u'citationnumber': u'32.', u'@id': u'CR32', u'bibbook': {u'publisherlocation': u'Englewood Cliffs', u'bibauthorname': [{u'familyname': u'Felsen', u'initials': u'LB'}, {u'familyname': u'Marcuvitz', u'initials': u'N'}], u'publishername': u'Prentice-Hall, Inc.', u'booktitle': u'Radiation and Scattering of Waves. Microwaves and Fields Series', u'year': u'1973'}}")],
[('AUTHOR_FIRST_NAME', u'BL'), ('AUTHOR_LAST_NAME', u'Sharma'), ('TITLE', u'Continuum'), ('TITLE', u'limit'), ('TITLE', u'of'), ('TITLE', u'discrete'), ('TITLE', u'Sommerfeld'), ('TITLE', u'problems'), ('TITLE', u'on'), ('TITLE', u'square'), ('TITLE', u'lattice'), ('JOURNAL', u'S&amacrdhan&amacr'), ('VOLUME', u'42'), ('ISSUE', u'5'), ('YEAR', u'2007'), ('PAGE', u'713'), ('REFPLAINTEXT', u'Sharma, B.L.: Continuum limit of discrete Sommerfeld problems on square lattice. S&amacrdhan&amacr 42(5), 713\u2013728 (2007)'), ('REFSTR', "{u'bibunstructured': u'Sharma, B.L.: Continuum limit of discrete Sommerfeld problems on square lattice. S&amacrdhan&amacr 42(5), 713\\u2013728 (2007)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Sharma', u'initials': u'BL'}, u'issueid': u'5', u'journaltitle': u'S&amacrdhan&amacr', u'volumeid': u'42', u'firstpage': u'713', u'lastpage': u'728', u'year': u'2007', u'articletitle': {u'#text': u'Continuum limit of discrete Sommerfeld problems on square lattice', u'@language': u'En'}, u'occurrence': [{u'handle': u'3659067', u'@type': u'AMSID'}, {u'handle': u'1381.35169', u'@type': u'ZLBID'}]}, u'citationnumber': u'33.', u'@id': u'CR33'}")],
[('AUTHOR_FIRST_NAME', u'L'), ('AUTHOR_LAST_NAME', u'Brillouin'), ('YEAR', u'1946'), ('PUBLISHER', u'Wave'), ('PUBLISHER', u'Propagation'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Periodic'), ('PUBLISHER', u'Structures'), ('REFPLAINTEXT', u'Brillouin, L.: Wave Propagation in Periodic Structures. Electric Filters and Crystal Lattices. McGraw-Hill Book Company Inc., New York (1946)'), ('REFSTR', "{u'bibunstructured': u'Brillouin, L.: Wave Propagation in Periodic Structures. Electric Filters and Crystal Lattices. McGraw-Hill Book Company Inc., New York (1946)', u'citationnumber': u'34.', u'@id': u'CR34', u'bibbook': {u'bibauthorname': {u'familyname': u'Brillouin', u'initials': u'L'}, u'publisherlocation': u'New York', u'occurrence': {u'handle': u'0063.00607', u'@type': u'ZLBID'}, u'booktitle': u'Wave Propagation in Periodic Structures', u'year': u'1946', u'publishername': u'Electric Filters and Crystal Lattices. McGraw-Hill Book Company Inc.'}}")],
[('AUTHOR_FIRST_NAME', u'BL'), ('AUTHOR_LAST_NAME', u'Sharma'), ('TITLE', u'Near-'), ('TITLE', u'tip'), ('TITLE', u'field'), ('TITLE', u'for'), ('TITLE', u'diffraction'), ('TITLE', u'on'), ('TITLE', u'square'), ('TITLE', u'lattice'), ('TITLE', u'by'), ('TITLE', u'rigid'), ('TITLE', u'constraint'), ('JOURNAL', u'Z.'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'66'), ('ISSUE', u'5'), ('YEAR', u'2015'), ('PAGE', u'2719'), ('DOI', u'10.1007/s00033-015-0508-z'), ('REFPLAINTEXT', u'Sharma, B.L.: Near-tip field for diffraction on square lattice by rigid constraint. Z. Angew. Math. Phys. 66(5), 2719\u20132740 (2015)'), ('REFSTR', "{u'bibunstructured': u'Sharma, B.L.: Near-tip field for diffraction on square lattice by rigid constraint. Z. Angew. Math. Phys. 66(5), 2719\\u20132740 (2015)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Sharma', u'initials': u'BL'}, u'issueid': u'5', u'journaltitle': u'Z. Angew. Math. Phys.', u'volumeid': u'66', u'firstpage': u'2719', u'lastpage': u'2740', u'year': u'2015', u'articletitle': {u'#text': u'Near-tip field for diffraction on square lattice by rigid constraint', u'@language': u'En'}, u'occurrence': [{u'handle': u'3412320', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-015-0508-z', u'@type': u'DOI'}]}, u'citationnumber': u'35.', u'@id': u'CR35'}")],
[('AUTHOR_FIRST_NAME', u'CJ'), ('AUTHOR_LAST_NAME', u'Bouwkamp'), ('TITLE', u'Diffraction'), ('TITLE', u'theory'), ('JOURNAL', u'Rep.'), ('JOURNAL', u'Prog.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'17'), ('YEAR', u'1954'), ('PAGE', u'35'), ('DOI', u'10.1088/0034-4885/17/1/302'), ('REFPLAINTEXT', u'Bouwkamp, C.J.: Diffraction theory. Rep. Prog. Phys. 17, 35\u2013100 (1954)'), ('REFSTR', "{u'bibunstructured': u'Bouwkamp, C.J.: Diffraction theory. Rep. Prog. Phys. 17, 35\\u2013100 (1954)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Bouwkamp', u'initials': u'CJ'}, u'occurrence': [{u'handle': u'63923', u'@type': u'AMSID'}, {u'handle': u'10.1088/0034-4885/17/1/302', u'@type': u'DOI'}], u'journaltitle': u'Rep. Prog. Phys.', u'volumeid': u'17', u'firstpage': u'35', u'lastpage': u'100', u'year': u'1954', u'articletitle': {u'#text': u'Diffraction theory', u'@language': u'En'}}, u'citationnumber': u'36.', u'@id': u'CR36'}")],
[('AUTHOR_FIRST_NAME', u'BL'), ('AUTHOR_LAST_NAME', u'Sharma'), ('TITLE', u'Diffraction'), ('TITLE', u'of'), ('TITLE', u'waves'), ('TITLE', u'on'), ('TITLE', u'square'), ('TITLE', u'lattice'), ('TITLE', u'by'), ('TITLE', u'semi-'), ('TITLE', u'infinite'), ('TITLE', u'rigid'), ('TITLE', u'constraint'), ('JOURNAL', u'Wave'), ('JOURNAL', u'Motion'), ('VOLUME', u'59'), ('YEAR', u'2015'), ('PAGE', u'52'), ('DOI', u'10.1016/j.wavemoti.2015.07.008'), ('REFPLAINTEXT', u'Sharma, B.L.: Diffraction of waves on square lattice by semi-infinite rigid constraint. Wave Motion 59, 52\u201368 (2015)'), ('REFSTR', "{u'bibunstructured': u'Sharma, B.L.: Diffraction of waves on square lattice by semi-infinite rigid constraint. Wave Motion 59, 52\\u201368 (2015)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Sharma', u'initials': u'BL'}, u'occurrence': [{u'handle': u'3411196', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.wavemoti.2015.07.008', u'@type': u'DOI'}], u'journaltitle': u'Wave Motion', u'volumeid': u'59', u'firstpage': u'52', u'lastpage': u'68', u'year': u'2015', u'articletitle': {u'#text': u'Diffraction of waves on square lattice by semi-infinite rigid constraint', u'@language': u'En'}}, u'citationnumber': u'37.', u'@id': u'CR37'}")],
[('AUTHOR_FIRST_NAME', u'H'), ('AUTHOR_LAST_NAME', u'Levy'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'Lessman'), ('YEAR', u'1993'), ('PUBLISHER', u'Finite'), ('PUBLISHER', u'Difference'), ('PUBLISHER', u'Equations'), ('REFPLAINTEXT', u'Levy, H., Lessman, F.: Finite Difference Equations. Dover Publications Inc, New York (1993). Reprint of the 1961 edition'), ('REFSTR', "{u'bibunstructured': u'Levy, H., Lessman, F.: Finite Difference Equations. Dover Publications Inc, New York (1993). Reprint of the 1961 edition', u'citationnumber': u'38.', u'@id': u'CR38', u'bibbook': {u'bibauthorname': [{u'familyname': u'Levy', u'initials': u'H'}, {u'familyname': u'Lessman', u'initials': u'F'}], u'publisherlocation': u'New York', u'occurrence': {u'handle': u'0092.07702', u'@type': u'ZLBID'}, u'booktitle': u'Finite Difference Equations', u'bibcomments': u'Reprint of the 1961 edition', u'year': u'1993', u'publishername': u'Dover Publications Inc'}}")],
[('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Elaydi'), ('YEAR', u'2005'), ('PUBLISHER', u'An'), ('PUBLISHER', u'Introduction'), ('PUBLISHER', u'to'), ('PUBLISHER', u'Difference'), ('PUBLISHER', u'Equations'), ('VOLUME', u'3'), ('REFPLAINTEXT', u'Elaydi, S.: An Introduction to Difference Equations, 3rd edn. Springer, New York (2005)'), ('REFSTR', "{u'bibunstructured': u'Elaydi, S.: An Introduction to Difference Equations, 3rd edn. Springer, New York (2005)', u'citationnumber': u'39.', u'@id': u'CR39', u'bibbook': {u'bibauthorname': {u'familyname': u'Elaydi', u'initials': u'S'}, u'publisherlocation': u'New York', u'occurrence': {u'handle': u'1071.39001', u'@type': u'ZLBID'}, u'booktitle': u'An Introduction to Difference Equations', u'year': u'2005', u'editionnumber': u'3', u'publishername': u'Springer'}}")],
[('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Bttcher'), ('AUTHOR_FIRST_NAME', u'B'), ('AUTHOR_LAST_NAME', u'Silbermann'), ('YEAR', u'2006'), ('PUBLISHER', u'Analysis'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Toeplitz'), ('PUBLISHER', u'Operators'), ('VOLUME', u'2'), ('REFPLAINTEXT', u'B\xf6ttcher, A., Silbermann, B.: Analysis of Toeplitz Operators, 2nd edn. Springer, Berlin (2006)'), ('REFSTR', "{u'bibunstructured': u'B\\xf6ttcher, A., Silbermann, B.: Analysis of Toeplitz Operators, 2nd edn. Springer, Berlin (2006)', u'citationnumber': u'40.', u'@id': u'CR40', u'bibbook': {u'bibauthorname': [{u'familyname': u'B\\xf6ttcher', u'initials': u'A'}, {u'familyname': u'Silbermann', u'initials': u'B'}], u'publisherlocation': u'Berlin', u'occurrence': {u'handle': u'1098.47002', u'@type': u'ZLBID'}, u'booktitle': u'Analysis of Toeplitz Operators', u'year': u'2006', u'editionnumber': u'2', u'publishername': u'Springer'}}")],
[('AUTHOR_FIRST_NAME', u'LC'), ('AUTHOR_LAST_NAME', u'Evans'), ('YEAR', u'2010'), ('PUBLISHER', u'Partial'), ('PUBLISHER', u'Differential'), ('PUBLISHER', u'Equations.'), ('PUBLISHER', u'Graduate'), ('PUBLISHER', u'Studies'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Mathematics'), ('VOLUME', u'2'), ('REFPLAINTEXT', u'Evans, L.C.: Partial Differential Equations. Graduate Studies in Mathematics, vol. 19, 2nd edn. American Mathematical Society, Providence (2010)'), ('REFSTR', "{u'bibunstructured': u'Evans, L.C.: Partial Differential Equations. Graduate Studies in Mathematics, vol. 19, 2nd edn. American Mathematical Society, Providence (2010)', u'citationnumber': u'41.', u'@id': u'CR41', u'bibbook': {u'bibauthorname': {u'familyname': u'Evans', u'initials': u'LC'}, u'publishername': u'American Mathematical Society', u'booktitle': u'Partial Differential Equations. Graduate Studies in Mathematics', u'year': u'2010', u'numberinseries': u'19', u'editionnumber': u'2', u'publisherlocation': u'Providence'}}")],
[('AUTHOR_FIRST_NAME', u'D'), ('AUTHOR_LAST_NAME', u'Gilbarg'), ('AUTHOR_FIRST_NAME', u'NS'), ('AUTHOR_LAST_NAME', u'Trudinger'), ('YEAR', u'1983'), ('PUBLISHER', u'Elliptic'), ('PUBLISHER', u'Partial'), ('PUBLISHER', u'Differential'), ('PUBLISHER', u'Equations'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Second'), ('PUBLISHER', u'Order'), ('PUBLISHER', u'Classics'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Mathematics'), ('REFPLAINTEXT', u'Gilbarg, D., Trudinger, N.S.: Elliptic Partial Differential Equations of Second Order Classics in Mathematics. Springer, Berlin (1983). Reprint of the 1998 edition'), ('REFSTR', "{u'bibunstructured': u'Gilbarg, D., Trudinger, N.S.: Elliptic Partial Differential Equations of Second Order Classics in Mathematics. Springer, Berlin (1983). Reprint of the 1998 edition', u'citationnumber': u'42.', u'@id': u'CR42', u'bibbook': {u'bibauthorname': [{u'familyname': u'Gilbarg', u'initials': u'D'}, {u'familyname': u'Trudinger', u'initials': u'NS'}], u'publisherlocation': u'Berlin', u'occurrence': {u'handle': u'10.1007/978-3-642-61798-0', u'@type': u'DOI'}, u'booktitle': u'Elliptic Partial Differential Equations of Second Order Classics in Mathematics', u'bibcomments': u'Reprint of the 1998 edition', u'year': u'1983', u'publishername': u'Springer'}}")],
[('YEAR', u'1986'), ('PUBLISHER', u'Constructive'), ('PUBLISHER', u'Methods'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Wiener\u2013Hopf'), ('PUBLISHER', u'Factorization.'), ('PUBLISHER', u'Operator'), ('PUBLISHER', u'Theory:'), ('PUBLISHER', u'Advances'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Applications'), ('REFPLAINTEXT', u'Gohberg, I., Kaashoek, M.A. (eds.): Constructive Methods of Wiener\u2013Hopf Factorization. Operator Theory: Advances and Applications, vol. 21. Birkh\xe4user Verlag, Basel (1986)'), ('REFSTR', "{u'bibunstructured': u'Gohberg, I., Kaashoek, M.A. (eds.): Constructive Methods of Wiener\\u2013Hopf Factorization. Operator Theory: Advances and Applications, vol. 21. Birkh\\xe4user Verlag, Basel (1986)', u'citationnumber': u'43.', u'@id': u'CR43', u'bibbook': {u'eds': {u'publisherlocation': u'Basel', u'booktitle': u'Constructive Methods of Wiener\\u2013Hopf Factorization. Operator Theory: Advances and Applications', u'numberinseries': u'21', u'publishername': u'Birkh\\xe4user Verlag', u'year': u'1986'}, u'bibeditorname': [{u'familyname': u'Gohberg', u'initials': u'I'}, {u'familyname': u'Kaashoek', u'initials': u'MA'}]}}")],
[('REFPLAINTEXT', u'Gakhov, F.D.: Boundary Value Problems. Dover Publications, Inc., New York. Translated from the Russian, Reprint of the 1966 translation'), ('REFSTR', "{u'bibunstructured': u'Gakhov, F.D.: Boundary Value Problems. Dover Publications, Inc., New York. Translated from the Russian, Reprint of the 1966 translation', u'citationnumber': u'44.', u'@id': u'CR44'}")],
[('AUTHOR_FIRST_NAME', u'R'), ('AUTHOR_LAST_NAME', u'Mitra'), ('AUTHOR_FIRST_NAME', u'SW'), ('AUTHOR_LAST_NAME', u'Lee'), ('YEAR', u'1971'), ('PUBLISHER', u'Analytical'), ('PUBLISHER', u'Techniques'), ('PUBLISHER', u'in'), ('PUBLISHER', u'the'), ('PUBLISHER', u'Theory'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Guided'), ('PUBLISHER', u'Waves'), ('REFPLAINTEXT', u'Mitra, R., Lee, S.W.: Analytical Techniques in the Theory of Guided Waves. Macmillan, New York (1971)'), ('REFSTR', "{u'bibunstructured': u'Mitra, R., Lee, S.W.: Analytical Techniques in the Theory of Guided Waves. Macmillan, New York (1971)', u'citationnumber': u'45.', u'@id': u'CR45', u'bibbook': {u'publisherlocation': u'New York', u'bibauthorname': [{u'familyname': u'Mitra', u'initials': u'R'}, {u'familyname': u'Lee', u'initials': u'SW'}], u'publishername': u'Macmillan', u'booktitle': u'Analytical Techniques in the Theory of Guided Waves', u'year': u'1971'}}")],
[('AUTHOR_FIRST_NAME', u'JG'), ('AUTHOR_LAST_NAME', u'Harris'), ('YEAR', u'2001'), ('PUBLISHER', u'Linear'), ('PUBLISHER', u'Elastic'), ('PUBLISHER', u'Waves'), ('REFPLAINTEXT', u'Harris, J.G.: Linear Elastic Waves, vol. 26. Cambridge University Press, Cambridge (2001)'), ('REFSTR', "{u'bibunstructured': u'Harris, J.G.: Linear Elastic Waves, vol. 26. Cambridge University Press, Cambridge (2001)', u'citationnumber': u'46.', u'@id': u'CR46', u'bibbook': {u'bibauthorname': {u'familyname': u'Harris', u'initials': u'JG'}, u'publisherlocation': u'Cambridge', u'occurrence': {u'handle': u'10.1017/CBO9780511755415', u'@type': u'DOI'}, u'booktitle': u'Linear Elastic Waves', u'year': u'2001', u'numberinseries': u'26', u'publishername': u'Cambridge University Press'}}")],
[('REFPLAINTEXT', u'Collatz, L.: The Numerical Treatment of Differential Equations, 3d edn. Translated from a supplemented version of the 2d German edition by P. G. Williams. Die Grundlehren der mathematischen Wissenschaften, Bd. 60. Springer, Berlin-G\xf6ttingen-Heidelberg'), ('REFSTR', "{u'bibunstructured': u'Collatz, L.: The Numerical Treatment of Differential Equations, 3d edn. Translated from a supplemented version of the 2d German edition by P. G. Williams. Die Grundlehren der mathematischen Wissenschaften, Bd. 60. Springer, Berlin-G\\xf6ttingen-Heidelberg', u'citationnumber': u'47.', u'@id': u'CR47'}")],
[('AUTHOR_FIRST_NAME', u'JC'), ('AUTHOR_LAST_NAME', u'Mason'), ('AUTHOR_FIRST_NAME', u'DC'), ('AUTHOR_LAST_NAME', u'Handscomb'), ('YEAR', u'2002'), ('PUBLISHER', u'Chebyshev'), ('PUBLISHER', u'Polynomials'), ('REFPLAINTEXT', u'Mason, J.C., Handscomb, D.C.: Chebyshev Polynomials. Chapman & Hall, Boca Raton (2002)'), ('REFSTR', "{u'bibunstructured': u'Mason, J.C., Handscomb, D.C.: Chebyshev Polynomials. Chapman & Hall, Boca Raton (2002)', u'citationnumber': u'48.', u'@id': u'CR48', u'bibbook': {u'bibauthorname': [{u'familyname': u'Mason', u'initials': u'JC'}, {u'familyname': u'Handscomb', u'initials': u'DC'}], u'publisherlocation': u'Boca Raton', u'occurrence': {u'handle': u'10.1201/9781420036114', u'@type': u'DOI'}, u'booktitle': u'Chebyshev Polynomials', u'year': u'2002', u'publishername': u'Chapman & Hall'}}")],
[('AUTHOR_FIRST_NAME', u'BL'), ('AUTHOR_LAST_NAME', u'Sharma'), ('TITLE', u'On'), ('TITLE', u'linear'), ('TITLE', u'waveguides'), ('TITLE', u'of'), ('TITLE', u'square'), ('TITLE', u'and'), ('TITLE', u'triangular'), ('TITLE', u'lattice'), ('TITLE', u'strips:'), ('TITLE', u'an'), ('TITLE', u'application'), ('TITLE', u'of'), ('TITLE', u'Chebyshev'), ('TITLE', u'polynomials'), ('JOURNAL', u'S&amacrdhan&amacr'), ('VOLUME', u'42'), ('ISSUE', u'6'), ('YEAR', u'2017'), ('PAGE', u'901'), ('REFPLAINTEXT', u'Sharma, B.L.: On linear waveguides of square and triangular lattice strips: an application of Chebyshev polynomials. S&amacrdhan&amacr 42(6), 901\u2013927 (2017)'), ('REFSTR', "{u'bibunstructured': u'Sharma, B.L.: On linear waveguides of square and triangular lattice strips: an application of Chebyshev polynomials. S&amacrdhan&amacr 42(6), 901\\u2013927 (2017)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Sharma', u'initials': u'BL'}, u'issueid': u'6', u'journaltitle': u'S&amacrdhan&amacr', u'volumeid': u'42', u'firstpage': u'901', u'lastpage': u'927', u'year': u'2017', u'articletitle': {u'#text': u'On linear waveguides of square and triangular lattice strips: an application of Chebyshev polynomials', u'@language': u'En'}, u'occurrence': [{u'handle': u'3670951', u'@type': u'AMSID'}, {u'handle': u'1390.78026', u'@type': u'ZLBID'}]}, u'citationnumber': u'49.', u'@id': u'CR49'}")],
[('YEAR', u'1974'), ('PUBLISHER', u'Handbook'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Mathematical'), ('PUBLISHER', u'Functions'), ('PUBLISHER', u'with'), ('PUBLISHER', u'Formulas,'), ('PUBLISHER', u'Graphs,'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Mathematical'), ('PUBLISHER', u'Tables'), ('REFPLAINTEXT', u'Abramowitz, M., Stegun, I.A. (eds.): Handbook of Mathematical Functions with Formulas, Graphs, and Mathematical Tables. Dover, New York (1974)'), ('REFSTR', "{u'bibunstructured': u'Abramowitz, M., Stegun, I.A. (eds.): Handbook of Mathematical Functions with Formulas, Graphs, and Mathematical Tables. Dover, New York (1974)', u'citationnumber': u'50.', u'@id': u'CR50', u'bibbook': {u'eds': {u'publisherlocation': u'New York', u'booktitle': u'Handbook of Mathematical Functions with Formulas, Graphs, and Mathematical Tables', u'publishername': u'Dover', u'occurrence': {u'handle': u'0171.38503', u'@type': u'ZLBID'}, u'year': u'1974'}, u'bibeditorname': [{u'familyname': u'Abramowitz', u'initials': u'M'}, {u'familyname': u'Stegun', u'initials': u'IA'}]}}")],
[('AUTHOR_FIRST_NAME', u'BL'), ('AUTHOR_LAST_NAME', u'Sharma'), ('TITLE', u'Wave'), ('TITLE', u'propagation'), ('TITLE', u'in'), ('TITLE', u'bifurcated'), ('TITLE', u'waveguides'), ('TITLE', u'of'), ('TITLE', u'square'), ('TITLE', u'lattice'), ('TITLE', u'strips'), ('JOURNAL', u'SIAM'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'76'), ('ISSUE', u'4'), ('YEAR', u'2016'), ('PAGE', u'1355'), ('DOI', u'10.1137/15M1051464'), ('REFPLAINTEXT', u'Sharma, B.L.: Wave propagation in bifurcated waveguides of square lattice strips. SIAM J. Appl. Math. 76(4), 1355\u20131381 (2016)'), ('REFSTR', "{u'bibunstructured': u'Sharma, B.L.: Wave propagation in bifurcated waveguides of square lattice strips. SIAM J. Appl. Math. 76(4), 1355\\u20131381 (2016)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Sharma', u'initials': u'BL'}, u'issueid': u'4', u'journaltitle': u'SIAM J. Appl. Math.', u'volumeid': u'76', u'firstpage': u'1355', u'lastpage': u'1381', u'year': u'2016', u'articletitle': {u'#text': u'Wave propagation in bifurcated waveguides of square lattice strips', u'@language': u'En'}, u'occurrence': [{u'handle': u'3527694', u'@type': u'AMSID'}, {u'handle': u'10.1137/15M1051464', u'@type': u'DOI'}]}, u'citationnumber': u'51.', u'@id': u'CR51'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Acheritogaray'), ('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Degond'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Frouvelle'), ('AUTHOR_FIRST_NAME', u'JG'), ('AUTHOR_LAST_NAME', u'Liu'), ('TITLE', u'Kinetic'), ('TITLE', u'formulation'), ('TITLE', u'and'), ('TITLE', u'global'), ('TITLE', u'existence'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'Hall-'), ('TITLE', u'Magneto-'), ('TITLE', u'hydrodynamics'), ('TITLE', u'system'), ('JOURNAL', u'Kinet.'), ('JOURNAL', u'Relat.'), ('JOURNAL', u'Models'), ('VOLUME', u'4'), ('YEAR', u'2011'), ('PAGE', u'901'), ('DOI', u'10.3934/krm.2011.4.901'), ('REFPLAINTEXT', u'Acheritogaray, M., Degond, P., Frouvelle, A., Liu, J.G.: Kinetic formulation and global existence for the Hall-Magneto-hydrodynamics system. Kinet. Relat. Models 4, 901\u2013918 (2011)'), ('REFSTR', "{u'bibunstructured': u'Acheritogaray, M., Degond, P., Frouvelle, A., Liu, J.G.: Kinetic formulation and global existence for the Hall-Magneto-hydrodynamics system. Kinet. Relat. Models 4, 901\\u2013918 (2011)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Acheritogaray', u'initials': u'M'}, {u'familyname': u'Degond', u'initials': u'P'}, {u'familyname': u'Frouvelle', u'initials': u'A'}, {u'familyname': u'Liu', u'initials': u'JG'}], u'occurrence': [{u'handle': u'2861579', u'@type': u'AMSID'}, {u'handle': u'10.3934/krm.2011.4.901', u'@type': u'DOI'}], u'journaltitle': u'Kinet. Relat. Models', u'volumeid': u'4', u'firstpage': u'901', u'lastpage': u'918', u'year': u'2011', u'articletitle': {u'#text': u'Kinetic formulation and global existence for the Hall-Magneto-hydrodynamics system', u'@outputmedium': u'All', u'@language': u'En'}}, u'citationnumber': u'1.', u'@id': u'CR1'}")],
[('AUTHOR_FIRST_NAME', u'RA'), ('AUTHOR_LAST_NAME', u'Adams'), ('YEAR', u'1975'), ('PUBLISHER', u'Sobolev'), ('PUBLISHER', u'Space'), ('REFPLAINTEXT', u'Adams, R.A.: Sobolev Space. Academic Press, New York (1975)'), ('REFSTR', "{u'bibunstructured': u'Adams, R.A.: Sobolev Space. Academic Press, New York (1975)', u'citationnumber': u'2.', u'@id': u'CR2', u'bibbook': {u'publisherlocation': u'New York', u'bibauthorname': {u'familyname': u'Adams', u'initials': u'RA'}, u'publishername': u'Academic Press', u'booktitle': u'Sobolev Space', u'year': u'1975'}}")],
[('AUTHOR_FIRST_NAME', u'SA'), ('AUTHOR_LAST_NAME', u'Balbus'), ('AUTHOR_FIRST_NAME', u'C'), ('AUTHOR_LAST_NAME', u'Terquem'), ('TITLE', u'Linear'), ('TITLE', u'analysis'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'Hall'), ('TITLE', u'effect'), ('TITLE', u'in'), ('TITLE', u'protostellar'), ('TITLE', u'disks'), ('JOURNAL', u'Astrophys.'), ('JOURNAL', u'J.'), ('VOLUME', u'552'), ('YEAR', u'2001'), ('PAGE', u'235'), ('REFPLAINTEXT', u'Balbus, S.A., Terquem, C.: Linear analysis of the Hall effect in protostellar disks. Astrophys. J. 552, 235\u2013247 (2001)'), ('REFSTR', "{u'bibunstructured': u'Balbus, S.A., Terquem, C.: Linear analysis of the Hall effect in protostellar disks. Astrophys. J. 552, 235\\u2013247 (2001)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Balbus', u'initials': u'SA'}, {u'familyname': u'Terquem', u'initials': u'C'}], u'occurrence': {u'handle': u'10.1086/320452', u'@type': u'DOI'}, u'journaltitle': u'Astrophys. J.', u'volumeid': u'552', u'firstpage': u'235', u'lastpage': u'247', u'year': u'2001', u'articletitle': {u'#text': u'Linear analysis of the Hall effect in protostellar disks', u'@language': u'En'}}, u'citationnumber': u'3.', u'@id': u'CR3'}")],
[('AUTHOR_FIRST_NAME', u'DH'), ('AUTHOR_LAST_NAME', u'Chae'), ('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Degond'), ('AUTHOR_FIRST_NAME', u'JG'), ('AUTHOR_LAST_NAME', u'Liu'), ('TITLE', u'Well-'), ('TITLE', u'posedness'), ('TITLE', u'for'), ('TITLE', u'Hall-'), ('TITLE', u'magnetohydrodynamics'), ('JOURNAL', u'Ann.'), ('JOURNAL', u'Inst.'), ('JOURNAL', u'H.'), ('JOURNAL', u'Poincar'), ('JOURNAL', u'Anal.'), ('JOURNAL', u'Non'), ('JOURNAL', u'Lin\xe9aire'), ('VOLUME', u'31'), ('YEAR', u'2014'), ('PAGE', u'555'), ('DOI', u'10.1016/j.anihpc.2013.04.006'), ('REFPLAINTEXT', u'Chae, D.H., Degond, P., Liu, J.G.: Well-posedness for Hall-magnetohydrodynamics. Ann. Inst. H. Poincar Anal. Non Lin\xe9aire 31, 555\u2013565 (2014)'), ('REFSTR', "{u'bibunstructured': u'Chae, D.H., Degond, P., Liu, J.G.: Well-posedness for Hall-magnetohydrodynamics. Ann. Inst. H. Poincar Anal. Non Lin\\xe9aire 31, 555\\u2013565 (2014)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Chae', u'initials': u'DH'}, {u'familyname': u'Degond', u'initials': u'P'}, {u'familyname': u'Liu', u'initials': u'JG'}], u'occurrence': [{u'handle': u'3208454', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.anihpc.2013.04.006', u'@type': u'DOI'}], u'journaltitle': u'Ann. Inst. H. Poincar Anal. Non Lin\\xe9aire', u'volumeid': u'31', u'firstpage': u'555', u'lastpage': u'565', u'year': u'2014', u'articletitle': {u'#text': u'Well-posedness for Hall-magnetohydrodynamics', u'@language': u'En'}}, u'citationnumber': u'4.', u'@id': u'CR4'}")],
[('AUTHOR_FIRST_NAME', u'DH'), ('AUTHOR_LAST_NAME', u'Chae'), ('AUTHOR_FIRST_NAME', u'JH'), ('AUTHOR_LAST_NAME', u'Lee'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'blow-'), ('TITLE', u'up'), ('TITLE', u'criterion'), ('TITLE', u'and'), ('TITLE', u'small'), ('TITLE', u'data'), ('TITLE', u'global'), ('TITLE', u'existence'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'Hall-'), ('TITLE', u'magnetohydrodynamics'), ('JOURNAL', u'J.'), ('JOURNAL', u'Differ.'), ('JOURNAL', u'Equ.'), ('VOLUME', u'256'), ('YEAR', u'2014'), ('PAGE', u'3835'), ('DOI', u'10.1016/j.jde.2014.03.003'), ('REFPLAINTEXT', u'Chae, D.H., Lee, J.H.: On the blow-up criterion and small data global existence for the Hall-magnetohydrodynamics. J. Differ. Equ. 256, 3835\u20133858 (2014)'), ('REFSTR', "{u'bibunstructured': u'Chae, D.H., Lee, J.H.: On the blow-up criterion and small data global existence for the Hall-magnetohydrodynamics. J. Differ. Equ. 256, 3835\\u20133858 (2014)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Chae', u'initials': u'DH'}, {u'familyname': u'Lee', u'initials': u'JH'}], u'occurrence': [{u'handle': u'3186849', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.jde.2014.03.003', u'@type': u'DOI'}], u'journaltitle': u'J. Differ. Equ.', u'volumeid': u'256', u'firstpage': u'3835', u'lastpage': u'3858', u'year': u'2014', u'articletitle': {u'#text': u'On the blow-up criterion and small data global existence for the Hall-magnetohydrodynamics', u'@language': u'En'}}, u'citationnumber': u'5.', u'@id': u'CR5'}")],
[('AUTHOR_FIRST_NAME', u'DH'), ('AUTHOR_LAST_NAME', u'Chae'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Schonbek'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'temporal'), ('TITLE', u'decay'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'Hall-'), ('TITLE', u'magnetohydrodynamic'), ('TITLE', u'equatioins'), ('JOURNAL', u'J.'), ('JOURNAL', u'Differ.'), ('JOURNAL', u'Equ.'), ('VOLUME', u'255'), ('ISSUE', u'11'), ('YEAR', u'2013'), ('PAGE', u'3971'), ('REFPLAINTEXT', u'Chae, D.H., Schonbek, M.: On the temporal decay for the Hall-magnetohydrodynamic equatioins. J. Differ. Equ. 255(11), 3971\u20133982 (2013)'), ('REFSTR', "{u'bibunstructured': u'Chae, D.H., Schonbek, M.: On the temporal decay for the Hall-magnetohydrodynamic equatioins. J. Differ. Equ. 255(11), 3971\\u20133982 (2013)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Chae', u'initials': u'DH'}, {u'familyname': u'Schonbek', u'initials': u'M'}], u'issueid': u'11', u'journaltitle': u'J. Differ. Equ.', u'volumeid': u'255', u'firstpage': u'3971', u'lastpage': u'3982', u'year': u'2013', u'articletitle': {u'#text': u'On the temporal decay for the Hall-magnetohydrodynamic equatioins', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1016/j.jde.2013.07.059', u'@type': u'DOI'}}, u'citationnumber': u'6.', u'@id': u'CR6'}")],
[('AUTHOR_FIRST_NAME', u'DH'), ('AUTHOR_LAST_NAME', u'Chae'), ('AUTHOR_FIRST_NAME', u'RH'), ('AUTHOR_LAST_NAME', u'Wan'), ('AUTHOR_FIRST_NAME', u'JH'), ('AUTHOR_LAST_NAME', u'Wu'), ('TITLE', u'Local'), ('TITLE', u'well-'), ('TITLE', u'posedness'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'Hall-'), ('TITLE', u'MHD'), ('TITLE', u'equations'), ('TITLE', u'with'), ('TITLE', u'fractional'), ('TITLE', u'magnetic'), ('TITLE', u'diffusion'), ('JOURNAL', u'J.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Fluid'), ('JOURNAL', u'Mech.'), ('VOLUME', u'17'), ('YEAR', u'2015'), ('PAGE', u'627'), ('DOI', u'10.1007/s00021-015-0222-9'), ('REFPLAINTEXT', u'Chae, D.H., Wan, R.H., Wu, J.H.: Local well-posedness for the Hall-MHD equations with fractional magnetic diffusion. J. Math. Fluid Mech. 17, 627\u2013638 (2015)'), ('REFSTR', "{u'bibunstructured': u'Chae, D.H., Wan, R.H., Wu, J.H.: Local well-posedness for the Hall-MHD equations with fractional magnetic diffusion. J. Math. Fluid Mech. 17, 627\\u2013638 (2015)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Chae', u'initials': u'DH'}, {u'familyname': u'Wan', u'initials': u'RH'}, {u'familyname': u'Wu', u'initials': u'JH'}], u'occurrence': [{u'handle': u'3412271', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00021-015-0222-9', u'@type': u'DOI'}], u'journaltitle': u'J. Math. Fluid Mech.', u'volumeid': u'17', u'firstpage': u'627', u'lastpage': u'638', u'year': u'2015', u'articletitle': {u'#text': u'Local well-posedness for the Hall-MHD equations with fractional magnetic diffusion', u'@language': u'En'}}, u'citationnumber': u'7.', u'@id': u'CR7'}")],
[('AUTHOR_FIRST_NAME', u'DH'), ('AUTHOR_LAST_NAME', u'Chae'), ('AUTHOR_FIRST_NAME', u'SK'), ('AUTHOR_LAST_NAME', u'Weng'), ('TITLE', u'Singularity'), ('TITLE', u'formation'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'incompressible'), ('TITLE', u'Hall-'), ('TITLE', u'MHD'), ('TITLE', u'equations'), ('TITLE', u'without'), ('TITLE', u'resistivity'), ('JOURNAL', u'Ann.'), ('JOURNAL', u'Inst.'), ('JOURNAL', u'H.'), ('JOURNAL', u'Poincar\xe9'), ('JOURNAL', u'Anal.'), ('JOURNAL', u'Non'), ('JOURNAL', u'Lin\xe9aire'), ('VOLUME', u'4'), ('YEAR', u'2016'), ('PAGE', u'1009'), ('DOI', u'10.1016/j.anihpc.2015.03.002'), ('REFPLAINTEXT', u'Chae, D.H., Weng, S.K.: Singularity formation for the incompressible Hall-MHD equations without resistivity. Ann. Inst. H. Poincar\xe9 Anal. Non Lin\xe9aire 4, 1009\u20131022 (2016)'), ('REFSTR', "{u'bibunstructured': u'Chae, D.H., Weng, S.K.: Singularity formation for the incompressible Hall-MHD equations without resistivity. Ann. Inst. H. Poincar\\xe9 Anal. Non Lin\\xe9aire 4, 1009\\u20131022 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Chae', u'initials': u'DH'}, {u'familyname': u'Weng', u'initials': u'SK'}], u'occurrence': [{u'handle': u'3519529', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.anihpc.2015.03.002', u'@type': u'DOI'}], u'journaltitle': u'Ann. Inst. H. Poincar\\xe9 Anal. Non Lin\\xe9aire', u'volumeid': u'4', u'firstpage': u'1009', u'lastpage': u'1022', u'year': u'2016', u'articletitle': {u'#text': u'Singularity formation for the incompressible Hall-MHD equations without resistivity', u'@language': u'En'}}, u'citationnumber': u'8.', u'@id': u'CR8'}")],
[('AUTHOR_FIRST_NAME', u'DH'), ('AUTHOR_LAST_NAME', u'Chae'), ('AUTHOR_FIRST_NAME', u'J'), ('AUTHOR_LAST_NAME', u'Wolf'), ('TITLE', u'On'), ('TITLE', u'partial'), ('TITLE', u'regularity'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'3D'), ('TITLE', u'nonstationary'), ('TITLE', u'Hall'), ('TITLE', u'magnetohydrodynamics'), ('TITLE', u'equations'), ('TITLE', u'on'), ('TITLE', u'the'), ('TITLE', u'plane'), ('JOURNAL', u'SIAM'), ('JOURNAL', u'J.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Anal.'), ('VOLUME', u'48'), ('YEAR', u'2016'), ('PAGE', u'443'), ('DOI', u'10.1137/15M1012037'), ('REFPLAINTEXT', u'Chae, D.H., Wolf, J.: On partial regularity for the 3D nonstationary Hall magnetohydrodynamics equations on the plane. SIAM J. Math. Anal. 48, 443\u2013469 (2016)'), ('REFSTR', "{u'bibunstructured': u'Chae, D.H., Wolf, J.: On partial regularity for the 3D nonstationary Hall magnetohydrodynamics equations on the plane. SIAM J. Math. Anal. 48, 443\\u2013469 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Chae', u'initials': u'DH'}, {u'familyname': u'Wolf', u'initials': u'J'}], u'occurrence': [{u'handle': u'3455137', u'@type': u'AMSID'}, {u'handle': u'10.1137/15M1012037', u'@type': u'DOI'}], u'journaltitle': u'SIAM J. Math. Anal.', u'volumeid': u'48', u'firstpage': u'443', u'lastpage': u'469', u'year': u'2016', u'articletitle': {u'#text': u'On partial regularity for the 3D nonstationary Hall magnetohydrodynamics equations on the plane', u'@language': u'En'}}, u'citationnumber': u'9.', u'@id': u'CR9'}")],
[('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'Crispo'), ('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Maremonti'), ('TITLE', u'An'), ('TITLE', u'interpolation'), ('TITLE', u'inequality'), ('TITLE', u'in'), ('TITLE', u'exterior'), ('TITLE', u'domains'), ('JOURNAL', u'Rend.'), ('JOURNAL', u'Sem.'), ('JOURNAL', u'Mat.'), ('JOURNAL', u'Univ.'), ('JOURNAL', u'Padova'), ('VOLUME', u'112'), ('YEAR', u'2004'), ('PAGE', u'11'), ('REFPLAINTEXT', u'Crispo, F., Maremonti, P.: An interpolation inequality in exterior domains. Rend. Sem. Mat. Univ. Padova 112, 11\u201339 (2004)'), ('REFSTR', "{u'bibunstructured': u'Crispo, F., Maremonti, P.: An interpolation inequality in exterior domains. Rend. Sem. Mat. Univ. Padova 112, 11\\u201339 (2004)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Crispo', u'initials': u'F'}, {u'familyname': u'Maremonti', u'initials': u'P'}], u'occurrence': [{u'handle': u'2109950', u'@type': u'AMSID'}, {u'handle': u'1105.35150', u'@type': u'ZLBID'}], u'journaltitle': u'Rend. Sem. Mat. Univ. Padova', u'volumeid': u'112', u'firstpage': u'11', u'lastpage': u'39', u'year': u'2004', u'articletitle': {u'#text': u'An interpolation inequality in exterior domains', u'@language': u'En'}}, u'citationnumber': u'10.', u'@id': u'CR10'}")],
[('AUTHOR_FIRST_NAME', u'RJ'), ('AUTHOR_LAST_NAME', u'Duan'), ('AUTHOR_FIRST_NAME', u'HX'), ('AUTHOR_LAST_NAME', u'Liu'), ('AUTHOR_FIRST_NAME', u'SJ'), ('AUTHOR_LAST_NAME', u'Ukai'), ('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Yang'), ('TITLE', u'Optimal'), ('TITLE', u'L^p-'), ('TITLE', u'L^q'), ('TITLE', u'convergence'), ('TITLE', u'rates'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'compressible'), ('TITLE', u'NavierStokes'), ('TITLE', u'equations'), ('TITLE', u'with'), ('TITLE', u'potential'), ('TITLE', u'force'), ('JOURNAL', u'J.'), ('JOURNAL', u'Differ.'), ('JOURNAL', u'Equ.'), ('VOLUME', u'238'), ('YEAR', u'2007'), ('PAGE', u'220'), ('REFPLAINTEXT', u'Duan, R.J., Liu, H.X., Ukai, S.J., Yang, T.: Optimal L^p-L^q convergence rates for the compressible Navier\u2013Stokes equations with potential force. J. Differ. Equ. 238, 220\u2013233 (2007)'), ('REFSTR', "{u'bibunstructured': u'Duan, R.J., Liu, H.X., Ukai, S.J., Yang, T.: Optimal L^p-L^q convergence rates for the compressible Navier\\u2013Stokes equations with potential force. J. Differ. Equ. 238, 220\\u2013233 (2007)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Duan', u'initials': u'RJ'}, {u'familyname': u'Liu', u'initials': u'HX'}, {u'familyname': u'Ukai', u'initials': u'SJ'}, {u'familyname': u'Yang', u'initials': u'T'}], u'occurrence': {u'handle': u'10.1016/j.jde.2007.03.008', u'@type': u'DOI'}, u'journaltitle': u'J. Differ. Equ.', u'volumeid': u'238', u'firstpage': u'220', u'lastpage': u'233', u'year': u'2007', u'articletitle': {u'#text': u'Optimal L^p-L^q convergence rates for the compressible Navier\\u2013Stokes equations with potential force', u'@language': u'En'}}, u'citationnumber': u'11.', u'@id': u'CR11'}")],
[('AUTHOR_FIRST_NAME', u'JS'), ('AUTHOR_LAST_NAME', u'Fan'), ('AUTHOR_FIRST_NAME', u'B'), ('AUTHOR_LAST_NAME', u'Ahmad'), ('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Hayat'), ('AUTHOR_FIRST_NAME', u'Y'), ('AUTHOR_LAST_NAME', u'Zhou'), ('TITLE', u'On'), ('TITLE', u'well-'), ('TITLE', u'posedness'), ('TITLE', u'and'), ('TITLE', u'blow-'), ('TITLE', u'up'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'full'), ('TITLE', u'compressible'), ('TITLE', u'Hall-'), ('TITLE', u'MHD'), ('TITLE', u'system'), ('JOURNAL', u'Nonlinear'), ('JOURNAL', u'Anal.'), ('JOURNAL', u'Real'), ('JOURNAL', u'World'), ('JOURNAL', u'Appl.'), ('VOLUME', u'31'), ('YEAR', u'2016'), ('PAGE', u'569'), ('DOI', u'10.1016/j.nonrwa.2016.03.003'), ('REFPLAINTEXT', u'Fan, J.S., Ahmad, B., Hayat, T., Zhou, Y.: On well-posedness and blow-up for the full compressible Hall-MHD system. Nonlinear Anal. Real World Appl. 31, 569\u2013579 (2016)'), ('REFSTR', "{u'bibunstructured': u'Fan, J.S., Ahmad, B., Hayat, T., Zhou, Y.: On well-posedness and blow-up for the full compressible Hall-MHD system. Nonlinear Anal. Real World Appl. 31, 569\\u2013579 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Fan', u'initials': u'JS'}, {u'familyname': u'Ahmad', u'initials': u'B'}, {u'familyname': u'Hayat', u'initials': u'T'}, {u'familyname': u'Zhou', u'initials': u'Y'}], u'occurrence': [{u'handle': u'3490858', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.nonrwa.2016.03.003', u'@type': u'DOI'}], u'journaltitle': u'Nonlinear Anal. Real World Appl.', u'volumeid': u'31', u'firstpage': u'569', u'lastpage': u'579', u'year': u'2016', u'articletitle': {u'#text': u'On well-posedness and blow-up for the full compressible Hall-MHD system', u'@language': u'En'}}, u'citationnumber': u'12.', u'@id': u'CR12'}")],
[('AUTHOR_FIRST_NAME', u'JS'), ('AUTHOR_LAST_NAME', u'Fan'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Alsaedi'), ('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Hayat'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Nakamura'), ('AUTHOR_FIRST_NAME', u'Y'), ('AUTHOR_LAST_NAME', u'Zhou'), ('TITLE', u'On'), ('TITLE', u'strong'), ('TITLE', u'solutions'), ('TITLE', u'to'), ('TITLE', u'the'), ('TITLE', u'compressible'), ('TITLE', u'Hall-'), ('TITLE', u'magnetohydrodynamic'), ('TITLE', u'system'), ('JOURNAL', u'Nonlinear'), ('JOURNAL', u'Anal.'), ('JOURNAL', u'Real'), ('JOURNAL', u'World'), ('JOURNAL', u'Appl.'), ('VOLUME', u'22'), ('YEAR', u'2015'), ('PAGE', u'423'), ('DOI', u'10.1016/j.nonrwa.2014.10.003'), ('REFPLAINTEXT', u'Fan, J.S., Alsaedi, A., Hayat, T., Nakamura, G., Zhou, Y.: On strong solutions to the compressible Hall-magnetohydrodynamic system. Nonlinear Anal. Real World Appl. 22, 423\u2013434 (2015)'), ('REFSTR', "{u'bibunstructured': u'Fan, J.S., Alsaedi, A., Hayat, T., Nakamura, G., Zhou, Y.: On strong solutions to the compressible Hall-magnetohydrodynamic system. Nonlinear Anal. Real World Appl. 22, 423\\u2013434 (2015)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Fan', u'initials': u'JS'}, {u'familyname': u'Alsaedi', u'initials': u'A'}, {u'familyname': u'Hayat', u'initials': u'T'}, {u'familyname': u'Nakamura', u'initials': u'G'}, {u'familyname': u'Zhou', u'initials': u'Y'}], u'occurrence': [{u'handle': u'3280843', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.nonrwa.2014.10.003', u'@type': u'DOI'}], u'journaltitle': u'Nonlinear Anal. Real World Appl.', u'volumeid': u'22', u'firstpage': u'423', u'lastpage': u'434', u'year': u'2015', u'articletitle': {u'#text': u'On strong solutions to the compressible Hall-magnetohydrodynamic system', u'@language': u'En'}}, u'citationnumber': u'13.', u'@id': u'CR13'}")],
[('AUTHOR_FIRST_NAME', u'JS'), ('AUTHOR_LAST_NAME', u'Fan'), ('AUTHOR_FIRST_NAME', u'XJ'), ('AUTHOR_LAST_NAME', u'Jia'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Nakamura'), ('AUTHOR_FIRST_NAME', u'Y'), ('AUTHOR_LAST_NAME', u'Zhou'), ('TITLE', u'On'), ('TITLE', u'well-'), ('TITLE', u'posedness'), ('TITLE', u'and'), ('TITLE', u'blow-'), ('TITLE', u'up'), ('TITLE', u'criteria'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'magnetohydrodynamics'), ('TITLE', u'with'), ('TITLE', u'the'), ('TITLE', u'Hall'), ('TITLE', u'and'), ('TITLE', u'ion-'), ('TITLE', u'slip'), ('TITLE', u'effects'), ('JOURNAL', u'Z.'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'66'), ('YEAR', u'2015'), ('PAGE', u'1695'), ('DOI', u'10.1007/s00033-015-0499-9'), ('REFPLAINTEXT', u'Fan, J.S., Jia, X.J., Nakamura, G., Zhou, Y.: On well-posedness and blow-up criteria for the magnetohydrodynamics with the Hall and ion-slip effects. Z. Angew. Math. Phys. 66, 1695\u20131706 (2015)'), ('REFSTR', "{u'bibunstructured': u'Fan, J.S., Jia, X.J., Nakamura, G., Zhou, Y.: On well-posedness and blow-up criteria for the magnetohydrodynamics with the Hall and ion-slip effects. Z. Angew. Math. Phys. 66, 1695\\u20131706 (2015)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Fan', u'initials': u'JS'}, {u'familyname': u'Jia', u'initials': u'XJ'}, {u'familyname': u'Nakamura', u'initials': u'G'}, {u'familyname': u'Zhou', u'initials': u'Y'}], u'occurrence': [{u'handle': u'3377709', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-015-0499-9', u'@type': u'DOI'}], u'journaltitle': u'Z. Angew. Math. Phys.', u'volumeid': u'66', u'firstpage': u'1695', u'lastpage': u'1706', u'year': u'2015', u'articletitle': {u'#text': u'On well-posedness and blow-up criteria for the magnetohydrodynamics with the Hall and ion-slip effects', u'@language': u'En'}}, u'citationnumber': u'14.', u'@id': u'CR14'}")],
[('AUTHOR_FIRST_NAME', u'JS'), ('AUTHOR_LAST_NAME', u'Fan'), ('AUTHOR_FIRST_NAME', u'WH'), ('AUTHOR_LAST_NAME', u'Yu'), ('TITLE', u'Strong'), ('TITLE', u'solution'), ('TITLE', u'to'), ('TITLE', u'the'), ('TITLE', u'compressible'), ('TITLE', u'magnetohydrodynamic'), ('TITLE', u'equations'), ('TITLE', u'with'), ('TITLE', u'vacuum'), ('JOURNAL', u'Nonlinear'), ('JOURNAL', u'Anal.'), ('JOURNAL', u'Real'), ('JOURNAL', u'World'), ('JOURNAL', u'Appl.'), ('VOLUME', u'10'), ('YEAR', u'2009'), ('PAGE', u'392'), ('DOI', u'10.1016/j.nonrwa.2007.10.001'), ('REFPLAINTEXT', u'Fan, J.S., Yu, W.H.: Strong solution to the compressible magnetohydrodynamic equations with vacuum. Nonlinear Anal. Real World Appl. 10, 392\u2013409 (2009)'), ('REFSTR', "{u'bibunstructured': u'Fan, J.S., Yu, W.H.: Strong solution to the compressible magnetohydrodynamic equations with vacuum. Nonlinear Anal. Real World Appl. 10, 392\\u2013409 (2009)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Fan', u'initials': u'JS'}, {u'familyname': u'Yu', u'initials': u'WH'}], u'occurrence': [{u'handle': u'2451719', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.nonrwa.2007.10.001', u'@type': u'DOI'}], u'journaltitle': u'Nonlinear Anal. Real World Appl.', u'volumeid': u'10', u'firstpage': u'392', u'lastpage': u'409', u'year': u'2009', u'articletitle': {u'#text': u'Strong solution to the compressible magnetohydrodynamic equations with vacuum', u'@language': u'En'}}, u'citationnumber': u'15.', u'@id': u'CR15'}")],
[('AUTHOR_FIRST_NAME', u'TG'), ('AUTHOR_LAST_NAME', u'Forbes'), ('TITLE', u'Magnetic'), ('TITLE', u'reconnection'), ('TITLE', u'in'), ('TITLE', u'solar'), ('TITLE', u'flares'), ('JOURNAL', u'Geophys.'), ('JOURNAL', u'Astrophys.'), ('JOURNAL', u'Fluid'), ('JOURNAL', u'Dyn.'), ('VOLUME', u'62'), ('YEAR', u'1991'), ('PAGE', u'15'), ('REFPLAINTEXT', u'Forbes, T.G.: Magnetic reconnection in solar flares. Geophys. Astrophys. Fluid Dyn. 62, 15\u201336 (1991)'), ('REFSTR', "{u'bibunstructured': u'Forbes, T.G.: Magnetic reconnection in solar flares. Geophys. Astrophys. Fluid Dyn. 62, 15\\u201336 (1991)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Forbes', u'initials': u'TG'}, u'occurrence': {u'handle': u'10.1080/03091929108229123', u'@type': u'DOI'}, u'journaltitle': u'Geophys. Astrophys. Fluid Dyn.', u'volumeid': u'62', u'firstpage': u'15', u'lastpage': u'36', u'year': u'1991', u'articletitle': {u'#text': u'Magnetic reconnection in solar flares', u'@language': u'En'}}, u'citationnumber': u'16.', u'@id': u'CR16'}")],
[('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Gagliardo'), ('TITLE', u'Ulteriori'), ('TITLE', u'propriet'), ('TITLE', u'di'), ('TITLE', u'alcune'), ('TITLE', u'classi'), ('TITLE', u'di'), ('TITLE', u'funzioni'), ('TITLE', u'in'), ('TITLE', u'pi'), ('TITLE', u'variabili'), ('JOURNAL', u'Ricerche'), ('JOURNAL', u'Mat.'), ('JOURNAL', u'Univ.'), ('JOURNAL', u'Napoli'), ('VOLUME', u'8'), ('YEAR', u'1959'), ('PAGE', u'24'), ('REFPLAINTEXT', u'Gagliardo, E.: Ulteriori propriet\xe0 di alcune classi di funzioni in pi\xf9 variabili. Ricerche Mat. Univ. Napoli 8, 24\u201351 (1959)'), ('REFSTR', "{u'bibunstructured': u'Gagliardo, E.: Ulteriori propriet\\xe0 di alcune classi di funzioni in pi\\xf9 variabili. Ricerche Mat. Univ. Napoli 8, 24\\u201351 (1959)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Gagliardo', u'initials': u'E'}, u'occurrence': [{u'handle': u'109295', u'@type': u'AMSID'}, {u'handle': u'0199.44701', u'@type': u'ZLBID'}], u'journaltitle': u'Ricerche Mat. Univ. Napoli', u'volumeid': u'8', u'firstpage': u'24', u'lastpage': u'51', u'year': u'1959', u'articletitle': {u'#text': u'Ulteriori propriet\\xe0 di alcune classi di funzioni in pi\\xf9 variabili', u'@language': u'En'}}, u'citationnumber': u'17.', u'@id': u'CR17'}")],
[('AUTHOR_FIRST_NAME', u'JC'), ('AUTHOR_LAST_NAME', u'Gao'), ('AUTHOR_FIRST_NAME', u'ZA'), ('AUTHOR_LAST_NAME', u'Yao'), ('TITLE', u'Global'), ('TITLE', u'existence'), ('TITLE', u'and'), ('TITLE', u'optimal'), ('TITLE', u'decay'), ('TITLE', u'rates'), ('TITLE', u'of'), ('TITLE', u'solutions'), ('TITLE', u'for'), ('TITLE', u'compressible'), ('TITLE', u'Hall-'), ('TITLE', u'MHD'), ('TITLE', u'equations'), ('JOURNAL', u'Discrete'), ('JOURNAL', u'Contin.'), ('JOURNAL', u'Dyn.'), ('JOURNAL', u'Syst.'), ('VOLUME', u'36'), ('YEAR', u'2016'), ('PAGE', u'3077'), ('REFPLAINTEXT', u'Gao, J.C., Yao, Z.A.: Global existence and optimal decay rates of solutions for compressible Hall-MHD equations. Discrete Contin. Dyn. Syst. 36, 3077\u20133106 (2016)'), ('REFSTR', "{u'bibunstructured': u'Gao, J.C., Yao, Z.A.: Global existence and optimal decay rates of solutions for compressible Hall-MHD equations. Discrete Contin. Dyn. Syst. 36, 3077\\u20133106 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Gao', u'initials': u'JC'}, {u'familyname': u'Yao', u'initials': u'ZA'}], u'occurrence': [{u'handle': u'3485432', u'@type': u'AMSID'}, {u'handle': u'1332.76076', u'@type': u'ZLBID'}], u'journaltitle': u'Discrete Contin. Dyn. Syst.', u'volumeid': u'36', u'firstpage': u'3077', u'lastpage': u'3106', u'year': u'2016', u'articletitle': {u'#text': u'Global existence and optimal decay rates of solutions for compressible Hall-MHD equations', u'@language': u'En'}}, u'citationnumber': u'18.', u'@id': u'CR18'}")],
[('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Hall'), ('TITLE', u'On'), ('TITLE', u'a'), ('TITLE', u'new'), ('TITLE', u'action'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'magnet'), ('TITLE', u'on'), ('TITLE', u'electric'), ('TITLE', u'currents'), ('JOURNAL', u'Am.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Math.'), ('VOLUME', u'2'), ('YEAR', u'1879'), ('PAGE', u'287'), ('DOI', u'10.2307/2369245'), ('REFPLAINTEXT', u'Hall, E.: On a new action of the magnet on electric currents. Am. J. Math. 2, 287\u201392 (1879)'), ('REFSTR', "{u'bibunstructured': u'Hall, E.: On a new action of the magnet on electric currents. Am. J. Math. 2, 287\\u201392 (1879)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Hall', u'initials': u'E'}, u'occurrence': [{u'handle': u'1505227', u'@type': u'AMSID'}, {u'handle': u'10.2307/2369245', u'@type': u'DOI'}], u'journaltitle': u'Am. J. Math.', u'volumeid': u'2', u'firstpage': u'287', u'lastpage': u'92', u'year': u'1879', u'articletitle': {u'#text': u'On a new action of the magnet on electric currents', u'@language': u'En'}}, u'citationnumber': u'19.', u'@id': u'CR19'}")],
[('AUTHOR_FIRST_NAME', u'H'), ('AUTHOR_LAST_NAME', u'Homann'), ('AUTHOR_FIRST_NAME', u'R'), ('AUTHOR_LAST_NAME', u'Grauer'), ('TITLE', u'Bifurcation'), ('TITLE', u'analysis'), ('TITLE', u'of'), ('TITLE', u'magnetic'), ('TITLE', u'reconnection'), ('TITLE', u'in'), ('TITLE', u'Hall-'), ('TITLE', u'MHD'), ('TITLE', u'systems'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'D'), ('VOLUME', u'208'), ('YEAR', u'2005'), ('PAGE', u'59'), ('DOI', u'10.1016/j.physd.2005.06.003'), ('REFPLAINTEXT', u'Homann, H., Grauer, R.: Bifurcation analysis of magnetic reconnection in Hall-MHD systems. Phys. D 208, 59\u201372 (2005)'), ('REFSTR', "{u'bibunstructured': u'Homann, H., Grauer, R.: Bifurcation analysis of magnetic reconnection in Hall-MHD systems. Phys. D 208, 59\\u201372 (2005)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Homann', u'initials': u'H'}, {u'familyname': u'Grauer', u'initials': u'R'}], u'occurrence': [{u'handle': u'2167907', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.physd.2005.06.003', u'@type': u'DOI'}], u'journaltitle': u'Phys. D', u'volumeid': u'208', u'firstpage': u'59', u'lastpage': u'72', u'year': u'2005', u'articletitle': {u'#text': u'Bifurcation analysis of magnetic reconnection in Hall-MHD systems', u'@language': u'En'}}, u'citationnumber': u'20.', u'@id': u'CR20'}")],
[('AUTHOR_FIRST_NAME', u'XP'), ('AUTHOR_LAST_NAME', u'Hu'), ('AUTHOR_FIRST_NAME', u'DH'), ('AUTHOR_LAST_NAME', u'Wang'), ('TITLE', u'Global'), ('TITLE', u'existence'), ('TITLE', u'and'), ('TITLE', u'large-'), ('TITLE', u'time'), ('TITLE', u'behavior'), ('TITLE', u'of'), ('TITLE', u'solutions'), ('TITLE', u'to'), ('TITLE', u'the'), ('TITLE', u'three-'), ('TITLE', u'dimensional'), ('TITLE', u'equations'), ('TITLE', u'of'), ('TITLE', u'compressible'), ('TITLE', u'magnetohydrodynamic'), ('TITLE', u'flows'), ('JOURNAL', u'Arch.'), ('JOURNAL', u'Ration.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Anal.'), ('VOLUME', u'197'), ('YEAR', u'2010'), ('PAGE', u'203'), ('DOI', u'10.1007/s00205-010-0295-9'), ('REFPLAINTEXT', u'Hu, X.P., Wang, D.H.: Global existence and large-time behavior of solutions to the three-dimensional equations of compressible magnetohydrodynamic flows. Arch. Ration. Mech. Anal. 197, 203\u2013238 (2010)'), ('REFSTR', "{u'bibunstructured': u'Hu, X.P., Wang, D.H.: Global existence and large-time behavior of solutions to the three-dimensional equations of compressible magnetohydrodynamic flows. Arch. Ration. Mech. Anal. 197, 203\\u2013238 (2010)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Hu', u'initials': u'XP'}, {u'familyname': u'Wang', u'initials': u'DH'}], u'occurrence': [{u'handle': u'2646819', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00205-010-0295-9', u'@type': u'DOI'}], u'journaltitle': u'Arch. Ration. Mech. Anal.', u'volumeid': u'197', u'firstpage': u'203', u'lastpage': u'238', u'year': u'2010', u'articletitle': {u'#text': u'Global existence and large-time behavior of solutions to the three-dimensional equations of compressible magnetohydrodynamic flows', u'@language': u'En'}}, u'citationnumber': u'21.', u'@id': u'CR21'}")],
[('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Majda'), ('YEAR', u'1984'), ('PUBLISHER', u'Compressible'), ('PUBLISHER', u'Fluid'), ('PUBLISHER', u'Flow'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Systems'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Conservation'), ('PUBLISHER', u'Laws'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Several'), ('PUBLISHER', u'Space'), ('PUBLISHER', u'Variables'), ('REFPLAINTEXT', u'Majda, A.: Compressible Fluid Flow and Systems of Conservation Laws in Several Space Variables. Springer, New York (1984)'), ('REFSTR', "{u'bibunstructured': u'Majda, A.: Compressible Fluid Flow and Systems of Conservation Laws in Several Space Variables. Springer, New York (1984)', u'citationnumber': u'22.', u'@id': u'CR22', u'bibbook': {u'bibauthorname': {u'familyname': u'Majda', u'initials': u'A'}, u'publisherlocation': u'New York', u'occurrence': {u'handle': u'10.1007/978-1-4612-1116-7', u'@type': u'DOI'}, u'booktitle': u'Compressible Fluid Flow and Systems of Conservation Laws in Several Space Variables', u'year': u'1984', u'publishername': u'Springer'}}")],
[('AUTHOR_FIRST_NAME', u'PD'), ('AUTHOR_LAST_NAME', u'Mininni'), ('AUTHOR_FIRST_NAME', u'DO'), ('AUTHOR_LAST_NAME', u'Gmez'), ('AUTHOR_FIRST_NAME', u'SM'), ('AUTHOR_LAST_NAME', u'Mahajan'), ('TITLE', u'Dynamo'), ('TITLE', u'action'), ('TITLE', u'in'), ('TITLE', u'magnetohydrodynamics'), ('TITLE', u'and'), ('TITLE', u'Hall'), ('TITLE', u'magnetohydrodynamics'), ('JOURNAL', u'Astrophys.'), ('JOURNAL', u'J.'), ('VOLUME', u'587'), ('YEAR', u'2003'), ('PAGE', u'472'), ('REFPLAINTEXT', u'Mininni, P.D., G\xf2mez, D.O., Mahajan, S.M.: Dynamo action in magnetohydrodynamics and Hall magnetohydrodynamics. Astrophys. J. 587, 472\u2013481 (2003)'), ('REFSTR', "{u'bibunstructured': u'Mininni, P.D., G\\xf2mez, D.O., Mahajan, S.M.: Dynamo action in magnetohydrodynamics and Hall magnetohydrodynamics. Astrophys. J. 587, 472\\u2013481 (2003)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Mininni', u'initials': u'PD'}, {u'familyname': u'G\\xf2mez', u'initials': u'DO'}, {u'familyname': u'Mahajan', u'initials': u'SM'}], u'occurrence': {u'handle': u'10.1086/368181', u'@type': u'DOI'}, u'journaltitle': u'Astrophys. J.', u'volumeid': u'587', u'firstpage': u'472', u'lastpage': u'481', u'year': u'2003', u'articletitle': {u'#text': u'Dynamo action in magnetohydrodynamics and Hall magnetohydrodynamics', u'@language': u'En'}}, u'citationnumber': u'23.', u'@id': u'CR23'}")],
[('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Kobayashi'), ('TITLE', u'Some'), ('TITLE', u'estimates'), ('TITLE', u'of'), ('TITLE', u'solutions'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'equations'), ('TITLE', u'of'), ('TITLE', u'motion'), ('TITLE', u'of'), ('TITLE', u'compressible'), ('TITLE', u'viscous'), ('TITLE', u'fluid'), ('TITLE', u'in'), ('TITLE', u'the'), ('TITLE', u'three-'), ('TITLE', u'dimensional'), ('TITLE', u'exterior'), ('TITLE', u'domain'), ('JOURNAL', u'J.'), ('JOURNAL', u'Differ.'), ('JOURNAL', u'Equ.'), ('VOLUME', u'184'), ('YEAR', u'2002'), ('PAGE', u'587'), ('DOI', u'10.1006/jdeq.2002.4158'), ('REFPLAINTEXT', u'Kobayashi, T.: Some estimates of solutions for the equations of motion of compressible viscous fluid in the three-dimensional exterior domain. J. Differ. Equ. 184, 587\u2013619 (2002)'), ('REFSTR', "{u'bibunstructured': u'Kobayashi, T.: Some estimates of solutions for the equations of motion of compressible viscous fluid in the three-dimensional exterior domain. J. Differ. Equ. 184, 587\\u2013619 (2002)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Kobayashi', u'initials': u'T'}, u'occurrence': [{u'handle': u'1929890', u'@type': u'AMSID'}, {u'handle': u'10.1006/jdeq.2002.4158', u'@type': u'DOI'}], u'journaltitle': u'J. Differ. Equ.', u'volumeid': u'184', u'firstpage': u'587', u'lastpage': u'619', u'year': u'2002', u'articletitle': {u'#text': u'Some estimates of solutions for the equations of motion of compressible viscous fluid in the three-dimensional exterior domain', u'@language': u'En'}}, u'citationnumber': u'24.', u'@id': u'CR24'}")],
[('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Kobayashi'), ('AUTHOR_FIRST_NAME', u'Y'), ('AUTHOR_LAST_NAME', u'Shibata'), ('TITLE', u'Decay'), ('TITLE', u'estimates'), ('TITLE', u'of'), ('TITLE', u'solutions'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'equations'), ('TITLE', u'of'), ('TITLE', u'motion'), ('TITLE', u'of'), ('TITLE', u'compressible'), ('TITLE', u'viscous'), ('TITLE', u'and'), ('TITLE', u'heat-'), ('TITLE', u'conductive'), ('TITLE', u'gases'), ('TITLE', u'in'), ('TITLE', u'an'), ('TITLE', u'exterior'), ('TITLE', u'domain'), ('TITLE', u'in'), ('TITLE', u'R'), ('JOURNAL', u'Commun.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'251'), ('YEAR', u'2004'), ('PAGE', u'365'), ('REFPLAINTEXT', u'Kobayashi, T., Shibata, Y.: Decay estimates of solutions for the equations of motion of compressible viscous and heat-conductive gases in an exterior domain in R. Commun. Math. Phys. 251, 365\u2013376 (2004)'), ('REFSTR', "{u'bibunstructured': {u'#text': u'Kobayashi, T., Shibata, Y.: Decay estimates of solutions for the equations of motion of compressible viscous and heat-conductive gases in an exterior domain in R. Commun. Math. Phys. 251, 365\\u2013376 (2004)', u'sup': u'3'}, u'bibarticle': {u'bibauthorname': [{u'familyname': u'Kobayashi', u'initials': u'T'}, {u'familyname': u'Shibata', u'initials': u'Y'}], u'occurrence': {u'handle': u'10.1007/s00220-004-1062-2', u'@type': u'DOI'}, u'journaltitle': u'Commun. Math. Phys.', u'volumeid': u'251', u'firstpage': u'365', u'lastpage': u'376', u'year': u'2004', u'articletitle': {u'#text': u'Decay estimates of solutions for the equations of motion of compressible viscous and heat-conductive gases in an exterior domain in R', u'sup': u'3', u'@language': u'En'}}, u'citationnumber': u'25.', u'@id': u'CR25'}")],
[('AUTHOR_FIRST_NAME', u'HL'), ('AUTHOR_LAST_NAME', u'Li'), ('AUTHOR_FIRST_NAME', u'XY'), ('AUTHOR_LAST_NAME', u'Xu'), ('AUTHOR_FIRST_NAME', u'JW'), ('AUTHOR_LAST_NAME', u'Zhang'), ('TITLE', u'Global'), ('TITLE', u'classical'), ('TITLE', u'solutions'), ('TITLE', u'to'), ('TITLE', u'3D'), ('TITLE', u'compressible'), ('TITLE', u'magnetohydrodynamic'), ('TITLE', u'equations'), ('TITLE', u'with'), ('TITLE', u'large'), ('TITLE', u'oscillations'), ('TITLE', u'and'), ('TITLE', u'vaccum'), ('JOURNAL', u'SIAM'), ('JOURNAL', u'J.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Anal.'), ('VOLUME', u'45'), ('YEAR', u'2013'), ('PAGE', u'1356'), ('DOI', u'10.1137/120893355'), ('REFPLAINTEXT', u'Li, H.L., Xu, X.Y., Zhang, J.W.: Global classical solutions to 3D compressible magnetohydrodynamic equations with large oscillations and vaccum. SIAM J. Math. Anal. 45, 1356\u20131387 (2013)'), ('REFSTR', "{u'bibunstructured': u'Li, H.L., Xu, X.Y., Zhang, J.W.: Global classical solutions to 3D compressible magnetohydrodynamic equations with large oscillations and vaccum. SIAM J. Math. Anal. 45, 1356\\u20131387 (2013)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Li', u'initials': u'HL'}, {u'familyname': u'Xu', u'initials': u'XY'}, {u'familyname': u'Zhang', u'initials': u'JW'}], u'occurrence': [{u'handle': u'3056749', u'@type': u'AMSID'}, {u'handle': u'10.1137/120893355', u'@type': u'DOI'}], u'journaltitle': u'SIAM J. Math. Anal.', u'volumeid': u'45', u'firstpage': u'1356', u'lastpage': u'1387', u'year': u'2013', u'articletitle': {u'#text': u'Global classical solutions to 3D compressible magnetohydrodynamic equations with large oscillations and vaccum', u'@language': u'En'}}, u'citationnumber': u'26.', u'@id': u'CR26'}")],
[('AUTHOR_FIRST_NAME', u'BQ'), ('AUTHOR_LAST_NAME', u'Lv'), ('AUTHOR_FIRST_NAME', u'XD'), ('AUTHOR_LAST_NAME', u'Shi'), ('AUTHOR_FIRST_NAME', u'XY'), ('AUTHOR_LAST_NAME', u'Xu'), ('TITLE', u'Global'), ('TITLE', u'existence'), ('TITLE', u'and'), ('TITLE', u'large-'), ('TITLE', u'time'), ('TITLE', u'asymptotic'), ('TITLE', u'behavior'), ('TITLE', u'of'), ('TITLE', u'strong'), ('TITLE', u'solutions'), ('TITLE', u'to'), ('TITLE', u'the'), ('TITLE', u'compressible'), ('TITLE', u'magnetohydrodynamic'), ('TITLE', u'equations'), ('TITLE', u'with'), ('TITLE', u'vacuum'), ('JOURNAL', u'Indiana'), ('JOURNAL', u'Univ.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'J.'), ('VOLUME', u'65'), ('YEAR', u'2016'), ('PAGE', u'925'), ('DOI', u'10.1512/iumj.2016.65.5813'), ('REFPLAINTEXT', u'Lv, B.Q., Shi, X.D., Xu, X.Y.: Global existence and large-time asymptotic behavior of strong solutions to the compressible magnetohydrodynamic equations with vacuum. Indiana Univ. Math. J. 65, 925\u2013975 (2016)'), ('REFSTR', "{u'bibunstructured': u'Lv, B.Q., Shi, X.D., Xu, X.Y.: Global existence and large-time asymptotic behavior of strong solutions to the compressible magnetohydrodynamic equations with vacuum. Indiana Univ. Math. J. 65, 925\\u2013975 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Lv', u'initials': u'BQ'}, {u'familyname': u'Shi', u'initials': u'XD'}, {u'familyname': u'Xu', u'initials': u'XY'}], u'occurrence': [{u'handle': u'3528824', u'@type': u'AMSID'}, {u'handle': u'10.1512/iumj.2016.65.5813', u'@type': u'DOI'}], u'journaltitle': u'Indiana Univ. Math. J.', u'volumeid': u'65', u'firstpage': u'925', u'lastpage': u'975', u'year': u'2016', u'articletitle': {u'#text': u'Global existence and large-time asymptotic behavior of strong solutions to the compressible magnetohydrodynamic equations with vacuum', u'@language': u'En'}}, u'citationnumber': u'27.', u'@id': u'CR27'}")],
[('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Matsumura'), ('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Nishida'), ('TITLE', u'The'), ('TITLE', u'initial'), ('TITLE', u'value'), ('TITLE', u'problem'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'equations'), ('TITLE', u'of'), ('TITLE', u'motion'), ('TITLE', u'of'), ('TITLE', u'viscous'), ('TITLE', u'and'), ('TITLE', u'heat-'), ('TITLE', u'conductive'), ('TITLE', u'gases'), ('JOURNAL', u'J.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Kyoto'), ('JOURNAL', u'Univ.'), ('VOLUME', u'20'), ('YEAR', u'1980'), ('PAGE', u'67'), ('DOI', u'10.1215/kjm/1250522322'), ('REFPLAINTEXT', u'Matsumura, A., Nishida, T.: The initial value problem for the equations of motion of viscous and heat-conductive gases. J. Math. Kyoto Univ. 20, 67\u2013104 (1980)'), ('REFSTR', "{u'bibunstructured': u'Matsumura, A., Nishida, T.: The initial value problem for the equations of motion of viscous and heat-conductive gases. J. Math. Kyoto Univ. 20, 67\\u2013104 (1980)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Matsumura', u'initials': u'A'}, {u'familyname': u'Nishida', u'initials': u'T'}], u'occurrence': [{u'handle': u'564670', u'@type': u'AMSID'}, {u'handle': u'10.1215/kjm/1250522322', u'@type': u'DOI'}], u'journaltitle': u'J. Math. Kyoto Univ.', u'volumeid': u'20', u'firstpage': u'67', u'lastpage': u'104', u'year': u'1980', u'articletitle': {u'#text': u'The initial value problem for the equations of motion of viscous and heat-conductive gases', u'@language': u'En'}}, u'citationnumber': u'28.', u'@id': u'CR28'}")],
[('AUTHOR_FIRST_NAME', u'XK'), ('AUTHOR_LAST_NAME', u'Pu'), ('AUTHOR_FIRST_NAME', u'BL'), ('AUTHOR_LAST_NAME', u'Guo'), ('TITLE', u'Global'), ('TITLE', u'existence'), ('TITLE', u'and'), ('TITLE', u'convergence'), ('TITLE', u'rates'), ('TITLE', u'of'), ('TITLE', u'smooth'), ('TITLE', u'solutions'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'full'), ('TITLE', u'compressible'), ('TITLE', u'MHD'), ('TITLE', u'equations'), ('JOURNAL', u'Z.'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'64'), ('YEAR', u'2013'), ('PAGE', u'519'), ('DOI', u'10.1007/s00033-012-0245-5'), ('REFPLAINTEXT', u'Pu, X.K., Guo, B.L.: Global existence and convergence rates of smooth solutions for the full compressible MHD equations. Z. Angew. Math. Phys. 64, 519\u2013538 (2013)'), ('REFSTR', "{u'bibunstructured': u'Pu, X.K., Guo, B.L.: Global existence and convergence rates of smooth solutions for the full compressible MHD equations. Z. Angew. Math. Phys. 64, 519\\u2013538 (2013)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Pu', u'initials': u'XK'}, {u'familyname': u'Guo', u'initials': u'BL'}], u'occurrence': [{u'handle': u'3068837', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-012-0245-5', u'@type': u'DOI'}], u'journaltitle': u'Z. Angew. Math. Phys.', u'volumeid': u'64', u'firstpage': u'519', u'lastpage': u'538', u'year': u'2013', u'articletitle': {u'#text': u'Global existence and convergence rates of smooth solutions for the full compressible MHD equations', u'@language': u'En'}}, u'citationnumber': u'29.', u'@id': u'CR29'}")],
[('AUTHOR_FIRST_NAME', u'DA'), ('AUTHOR_LAST_NAME', u'Shalybkov'), ('AUTHOR_FIRST_NAME', u'VA'), ('AUTHOR_LAST_NAME', u'Urpin'), ('TITLE', u'The'), ('TITLE', u'Hall'), ('TITLE', u'effect'), ('TITLE', u'and'), ('TITLE', u'the'), ('TITLE', u'decay'), ('TITLE', u'of'), ('TITLE', u'magnetic'), ('TITLE', u'fields'), ('JOURNAL', u'Astron.'), ('JOURNAL', u'Astrophys.'), ('VOLUME', u'321'), ('YEAR', u'1997'), ('PAGE', u'685'), ('REFPLAINTEXT', u'Shalybkov, D.A., Urpin, V.A.: The Hall effect and the decay of magnetic fields. Astron. Astrophys. 321, 685\u2013690 (1997)'), ('REFSTR', "{u'bibunstructured': u'Shalybkov, D.A., Urpin, V.A.: The Hall effect and the decay of magnetic fields. Astron. Astrophys. 321, 685\\u2013690 (1997)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Shalybkov', u'initials': u'DA'}, {u'familyname': u'Urpin', u'initials': u'VA'}], u'journaltitle': u'Astron. Astrophys.', u'volumeid': u'321', u'firstpage': u'685', u'lastpage': u'690', u'year': u'1997', u'articletitle': {u'#text': u'The Hall effect and the decay of magnetic fields', u'@language': u'En'}}, u'citationnumber': u'30.', u'@id': u'CR30'}")],
[('AUTHOR_FIRST_NAME', u'Z'), ('AUTHOR_LAST_NAME', u'Tan'), ('AUTHOR_FIRST_NAME', u'HQ'), ('AUTHOR_LAST_NAME', u'Wang'), ('TITLE', u'Optimal'), ('TITLE', u'decay'), ('TITLE', u'rates'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'compressible'), ('TITLE', u'magnetohydrodynamic'), ('TITLE', u'equations'), ('JOURNAL', u'Nonlinear'), ('JOURNAL', u'Anal.'), ('JOURNAL', u'Real'), ('JOURNAL', u'World'), ('JOURNAL', u'Appl.'), ('VOLUME', u'14'), ('YEAR', u'2013'), ('PAGE', u'188'), ('DOI', u'10.1016/j.nonrwa.2012.05.012'), ('REFPLAINTEXT', u'Tan, Z., Wang, H.Q.: Optimal decay rates of the compressible magnetohydrodynamic equations. Nonlinear Anal. Real World Appl. 14, 188\u2013201 (2013)'), ('REFSTR', "{u'bibunstructured': u'Tan, Z., Wang, H.Q.: Optimal decay rates of the compressible magnetohydrodynamic equations. Nonlinear Anal. Real World Appl. 14, 188\\u2013201 (2013)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Tan', u'initials': u'Z'}, {u'familyname': u'Wang', u'initials': u'HQ'}], u'occurrence': [{u'handle': u'2969828', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.nonrwa.2012.05.012', u'@type': u'DOI'}], u'journaltitle': u'Nonlinear Anal. Real World Appl.', u'volumeid': u'14', u'firstpage': u'188', u'lastpage': u'201', u'year': u'2013', u'articletitle': {u'#text': u'Optimal decay rates of the compressible magnetohydrodynamic equations', u'@language': u'En'}}, u'citationnumber': u'31.', u'@id': u'CR31'}")],
[('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'Treves'), ('YEAR', u'1975'), ('PUBLISHER', u'Basic'), ('PUBLISHER', u'Linear'), ('PUBLISHER', u'Partial'), ('PUBLISHER', u'Differential'), ('PUBLISHER', u'Equations'), ('REFPLAINTEXT', u'Treves, F.: Basic Linear Partial Differential Equations. Academic Press, New York (1975)'), ('REFSTR', "{u'bibunstructured': u'Treves, F.: Basic Linear Partial Differential Equations. Academic Press, New York (1975)', u'citationnumber': u'32.', u'@id': u'CR32', u'bibbook': {u'bibauthorname': {u'familyname': u'Treves', u'initials': u'F'}, u'publisherlocation': u'New York', u'occurrence': {u'handle': u'0305.35001', u'@type': u'ZLBID'}, u'booktitle': u'Basic Linear Partial Differential Equations', u'year': u'1975', u'publishername': u'Academic Press'}}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Wardle'), ('TITLE', u'Star'), ('TITLE', u'formation'), ('TITLE', u'and'), ('TITLE', u'the'), ('TITLE', u'Hall'), ('TITLE', u'effect'), ('JOURNAL', u'Astrophys.'), ('JOURNAL', u'Space'), ('JOURNAL', u'Sci.'), ('VOLUME', u'292'), ('YEAR', u'2004'), ('PAGE', u'317'), ('REFPLAINTEXT', u'Wardle, M.: Star formation and the Hall effect. Astrophys. Space Sci. 292, 317\u2013323 (2004)'), ('REFSTR', "{u'bibunstructured': u'Wardle, M.: Star formation and the Hall effect. Astrophys. Space Sci. 292, 317\\u2013323 (2004)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Wardle', u'initials': u'M'}, u'occurrence': {u'handle': u'10.1023/B:ASTR.0000045033.80068.1f', u'@type': u'DOI'}, u'journaltitle': u'Astrophys. Space Sci.', u'volumeid': u'292', u'firstpage': u'317', u'lastpage': u'323', u'year': u'2004', u'articletitle': {u'#text': u'Star formation and the Hall effect', u'@language': u'En'}}, u'citationnumber': u'33.', u'@id': u'CR33'}")],
[('AUTHOR_FIRST_NAME', u'ZY'), ('AUTHOR_LAST_NAME', u'Xiang'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'Cauchy'), ('TITLE', u'problem'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'compressible'), ('TITLE', u'Hall-'), ('TITLE', u'magneto-'), ('TITLE', u'hydrodynamic'), ('TITLE', u'equatioins'), ('JOURNAL', u'J.'), ('JOURNAL', u'Evol.'), ('JOURNAL', u'Equ.'), ('VOLUME', u'17'), ('YEAR', u'2017'), ('PAGE', u'685'), ('DOI', u'10.1007/s00028-016-0333-7'), ('REFPLAINTEXT', u'Xiang, Z.Y.: On the Cauchy problem for the compressible Hall-magneto-hydrodynamic equatioins. J. Evol. Equ. 17, 685\u2013715 (2017)'), ('REFSTR', "{u'bibunstructured': u'Xiang, Z.Y.: On the Cauchy problem for the compressible Hall-magneto-hydrodynamic equatioins. J. Evol. Equ. 17, 685\\u2013715 (2017)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Xiang', u'initials': u'ZY'}, u'occurrence': [{u'handle': u'3665226', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00028-016-0333-7', u'@type': u'DOI'}], u'journaltitle': u'J. Evol. Equ.', u'volumeid': u'17', u'firstpage': u'685', u'lastpage': u'715', u'year': u'2017', u'articletitle': {u'#text': u'On the Cauchy problem for the compressible Hall-magneto-hydrodynamic equatioins', u'@language': u'En'}}, u'citationnumber': u'34.', u'@id': u'CR34'}")],
[('AUTHOR_FIRST_NAME', u'JW'), ('AUTHOR_LAST_NAME', u'Zhang'), ('AUTHOR_FIRST_NAME', u'JN'), ('AUTHOR_LAST_NAME', u'Zhao'), ('TITLE', u'Some'), ('TITLE', u'decay'), ('TITLE', u'estimates'), ('TITLE', u'of'), ('TITLE', u'solutions'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'3-'), ('TITLE', u'D'), ('TITLE', u'compressible'), ('TITLE', u'isentropic'), ('TITLE', u'magnetohydrodynamics'), ('JOURNAL', u'Commun.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'8'), ('YEAR', u'2010'), ('PAGE', u'835'), ('DOI', u'10.4310/CMS.2010.v8.n4.a2'), ('REFPLAINTEXT', u'Zhang, J.W., Zhao, J.N.: Some decay estimates of solutions for the 3-D compressible isentropic magnetohydrodynamics. Commun. Math. Sci. 8, 835\u2013850 (2010)'), ('REFSTR', "{u'bibunstructured': u'Zhang, J.W., Zhao, J.N.: Some decay estimates of solutions for the 3-D compressible isentropic magnetohydrodynamics. Commun. Math. Sci. 8, 835\\u2013850 (2010)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Zhang', u'initials': u'JW'}, {u'familyname': u'Zhao', u'initials': u'JN'}], u'occurrence': [{u'handle': u'2744908', u'@type': u'AMSID'}, {u'handle': u'10.4310/CMS.2010.v8.n4.a2', u'@type': u'DOI'}], u'journaltitle': u'Commun. Math. Sci.', u'volumeid': u'8', u'firstpage': u'835', u'lastpage': u'850', u'year': u'2010', u'articletitle': {u'#text': u'Some decay estimates of solutions for the 3-D compressible isentropic magnetohydrodynamics', u'@language': u'En'}}, u'citationnumber': u'35.', u'@id': u'CR35'}")],
[('AUTHOR_FIRST_NAME', u'A-L'), ('AUTHOR_LAST_NAME', u'Bessoud'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'Krasucki'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Michaille'), ('TITLE', u'Multi-'), ('TITLE', u'materials'), ('TITLE', u'with'), ('TITLE', u'strong'), ('TITLE', u'interface:'), ('TITLE', u'variational'), ('TITLE', u'modelings'), ('JOURNAL', u'Asymptot.'), ('JOURNAL', u'Anal.'), ('VOLUME', u'61'), ('YEAR', u'2009'), ('PAGE', u'1'), ('REFPLAINTEXT', u'Bessoud, A.-L., Krasucki, F., Michaille, G.: Multi-materials with strong interface: variational modelings. Asymptot. Anal. 61, 1\u201319 (2009)'), ('REFSTR', "{u'bibunstructured': u'Bessoud, A.-L., Krasucki, F., Michaille, G.: Multi-materials with strong interface: variational modelings. Asymptot. Anal. 61, 1\\u201319 (2009)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Bessoud', u'initials': u'A-L'}, {u'familyname': u'Krasucki', u'initials': u'F'}, {u'familyname': u'Michaille', u'initials': u'G'}], u'occurrence': [{u'handle': u'2483518', u'@type': u'AMSID'}, {u'handle': u'1201.35032', u'@type': u'ZLBID'}], u'journaltitle': u'Asymptot. Anal.', u'volumeid': u'61', u'firstpage': u'1', u'lastpage': u'19', u'year': u'2009', u'articletitle': {u'#text': u'Multi-materials with strong interface: variational modelings', u'@outputmedium': u'All', u'@language': u'En'}}, u'citationnumber': u'1.', u'@id': u'CR1'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Bonnet'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Constantinescu'), ('TITLE', u'Inverse'), ('TITLE', u'problems'), ('TITLE', u'in'), ('TITLE', u'elasticity'), ('JOURNAL', u'Inverse'), ('JOURNAL', u'Probl.'), ('VOLUME', u'21'), ('YEAR', u'2005'), ('PAGE', u'R1'), ('DOI', u'10.1088/0266-5611/21/2/R01'), ('REFPLAINTEXT', u'Bonnet, M., Constantinescu, A.: Inverse problems in elasticity. Inverse Probl. 21, R1\u2013R50 (2005)'), ('REFSTR', "{u'bibunstructured': u'Bonnet, M., Constantinescu, A.: Inverse problems in elasticity. Inverse Probl. 21, R1\\u2013R50 (2005)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Bonnet', u'initials': u'M'}, {u'familyname': u'Constantinescu', u'initials': u'A'}], u'occurrence': [{u'handle': u'2146268', u'@type': u'AMSID'}, {u'handle': u'10.1088/0266-5611/21/2/R01', u'@type': u'DOI'}], u'journaltitle': u'Inverse Probl.', u'volumeid': u'21', u'firstpage': u'R1', u'lastpage': u'R50', u'year': u'2005', u'articletitle': {u'#text': u'Inverse problems in elasticity', u'@language': u'En'}}, u'citationnumber': u'2.', u'@id': u'CR2'}")],
[('AUTHOR_FIRST_NAME', u'GP'), ('AUTHOR_LAST_NAME', u'Cherepanov'), ('YEAR', u'1979'), ('PUBLISHER', u'Mechanics'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Brittle'), ('PUBLISHER', u'Fracture'), ('REFPLAINTEXT', u'Cherepanov, G.P.: Mechanics of Brittle Fracture. McGraw-Hill, New York (1979)'), ('REFSTR', "{u'bibunstructured': u'Cherepanov, G.P.: Mechanics of Brittle Fracture. McGraw-Hill, New York (1979)', u'citationnumber': u'3.', u'@id': u'CR3', u'bibbook': {u'bibauthorname': {u'familyname': u'Cherepanov', u'initials': u'GP'}, u'publisherlocation': u'New York', u'occurrence': {u'handle': u'0442.73100', u'@type': u'ZLBID'}, u'booktitle': u'Mechanics of Brittle Fracture', u'year': u'1979', u'publishername': u'McGraw-Hill'}}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Eskin'), ('AUTHOR_FIRST_NAME', u'J'), ('AUTHOR_LAST_NAME', u'Ralston'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'inverse'), ('TITLE', u'boundary'), ('TITLE', u'value'), ('TITLE', u'problem'), ('TITLE', u'for'), ('TITLE', u'linear'), ('TITLE', u'isotropic'), ('TITLE', u'elasticity'), ('JOURNAL', u'Inverse'), ('JOURNAL', u'Probl.'), ('VOLUME', u'18'), ('YEAR', u'2002'), ('PAGE', u'907'), ('DOI', u'10.1088/0266-5611/18/3/324'), ('REFPLAINTEXT', u'Eskin, G., Ralston, J.: On the inverse boundary value problem for linear isotropic elasticity. Inverse Probl. 18, 907\u2013921 (2002)'), ('REFSTR', "{u'bibunstructured': u'Eskin, G., Ralston, J.: On the inverse boundary value problem for linear isotropic elasticity. Inverse Probl. 18, 907\\u2013921 (2002)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Eskin', u'initials': u'G'}, {u'familyname': u'Ralston', u'initials': u'J'}], u'occurrence': [{u'handle': u'1910209', u'@type': u'AMSID'}, {u'handle': u'10.1088/0266-5611/18/3/324', u'@type': u'DOI'}], u'journaltitle': u'Inverse Probl.', u'volumeid': u'18', u'firstpage': u'907', u'lastpage': u'921', u'year': u'2002', u'articletitle': {u'#text': u'On the inverse boundary value problem for linear isotropic elasticity', u'@language': u'En'}}, u'citationnumber': u'4.', u'@id': u'CR4'}")],
[('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Grisvard'), ('YEAR', u'1992'), ('PUBLISHER', u'Singularities'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Boundary'), ('PUBLISHER', u'Value'), ('PUBLISHER', u'Problems'), ('REFPLAINTEXT', u'Grisvard, P.: Singularities in Boundary Value Problems. Springer, Paris (1992)'), ('REFSTR', "{u'bibunstructured': u'Grisvard, P.: Singularities in Boundary Value Problems. Springer, Paris (1992)', u'citationnumber': u'5.', u'@id': u'CR5', u'bibbook': {u'bibauthorname': {u'familyname': u'Grisvard', u'initials': u'P'}, u'publisherlocation': u'Paris', u'occurrence': {u'handle': u'0766.35001', u'@type': u'ZLBID'}, u'booktitle': u'Singularities in Boundary Value Problems', u'year': u'1992', u'publishername': u'Springer'}}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Ikehata'), ('TITLE', u'Reconstruction'), ('TITLE', u'of'), ('TITLE', u'inclusion'), ('TITLE', u'from'), ('TITLE', u'boundary'), ('TITLE', u'measurements'), ('JOURNAL', u'J.'), ('JOURNAL', u'Inverse'), ('JOURNAL', u'Ill'), ('JOURNAL', u'Posed'), ('JOURNAL', u'Probl.'), ('VOLUME', u'10'), ('YEAR', u'2002'), ('PAGE', u'37'), ('DOI', u'10.1515/jiip.2002.10.1.37'), ('REFPLAINTEXT', u'Ikehata, M.: Reconstruction of inclusion from boundary measurements. J. Inverse Ill Posed Probl. 10, 37\u201365 (2002)'), ('REFSTR', "{u'bibunstructured': u'Ikehata, M.: Reconstruction of inclusion from boundary measurements. J. Inverse Ill Posed Probl. 10, 37\\u201365 (2002)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Ikehata', u'initials': u'M'}, u'occurrence': [{u'handle': u'1889237', u'@type': u'AMSID'}, {u'handle': u'10.1515/jiip.2002.10.1.37', u'@type': u'DOI'}], u'journaltitle': u'J. Inverse Ill Posed Probl.', u'volumeid': u'10', u'firstpage': u'37', u'lastpage': u'65', u'year': u'2002', u'articletitle': {u'#text': u'Reconstruction of inclusion from boundary measurements', u'@language': u'En'}}, u'citationnumber': u'6.', u'@id': u'CR6'}")],
[('AUTHOR_FIRST_NAME', u'H'), ('AUTHOR_LAST_NAME', u'Itou'), ('AUTHOR_FIRST_NAME', u'VA'), ('AUTHOR_LAST_NAME', u'Kovtunenko'), ('AUTHOR_FIRST_NAME', u'KR'), ('AUTHOR_LAST_NAME', u'Rajagopal'), ('TITLE', u'Nonlinear'), ('TITLE', u'elasticity'), ('TITLE', u'with'), ('TITLE', u'limiting'), ('TITLE', u'small'), ('TITLE', u'strain'), ('TITLE', u'for'), ('TITLE', u'cracks'), ('TITLE', u'subject'), ('TITLE', u'to'), ('TITLE', u'non-'), ('TITLE', u'penetration'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Solids'), ('VOLUME', u'22'), ('YEAR', u'2017'), ('PAGE', u'1334'), ('DOI', u'10.1177/1081286516632380'), ('REFPLAINTEXT', u'Itou, H., Kovtunenko, V.A., Rajagopal, K.R.: Nonlinear elasticity with limiting small strain for cracks subject to non-penetration. Math. Mech. Solids 22, 1334\u20131346 (2017)'), ('REFSTR', "{u'bibunstructured': u'Itou, H., Kovtunenko, V.A., Rajagopal, K.R.: Nonlinear elasticity with limiting small strain for cracks subject to non-penetration. Math. Mech. Solids 22, 1334\\u20131346 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Itou', u'initials': u'H'}, {u'familyname': u'Kovtunenko', u'initials': u'VA'}, {u'familyname': u'Rajagopal', u'initials': u'KR'}], u'occurrence': [{u'handle': u'3659617', u'@type': u'AMSID'}, {u'handle': u'10.1177/1081286516632380', u'@type': u'DOI'}], u'journaltitle': u'Math. Mech. Solids', u'volumeid': u'22', u'firstpage': u'1334', u'lastpage': u'1346', u'year': u'2017', u'articletitle': {u'#text': u'Nonlinear elasticity with limiting small strain for cracks subject to non-penetration', u'@language': u'En'}}, u'citationnumber': u'7.', u'@id': u'CR7'}")],
[('AUTHOR_FIRST_NAME', u'H'), ('AUTHOR_LAST_NAME', u'Itou'), ('AUTHOR_FIRST_NAME', u'VA'), ('AUTHOR_LAST_NAME', u'Kovtunenko'), ('AUTHOR_FIRST_NAME', u'KR'), ('AUTHOR_LAST_NAME', u'Rajagopal'), ('TITLE', u'Contacting'), ('TITLE', u'crack'), ('TITLE', u'faces'), ('TITLE', u'within'), ('TITLE', u'the'), ('TITLE', u'context'), ('TITLE', u'of'), ('TITLE', u'bodies'), ('TITLE', u'exhibiting'), ('TITLE', u'limiting'), ('TITLE', u'strains'), ('JOURNAL', u'JSIAM'), ('JOURNAL', u'Lett.'), ('VOLUME', u'9'), ('YEAR', u'2017'), ('PAGE', u'61'), ('DOI', u'10.14495/jsiaml.9.61'), ('REFPLAINTEXT', u'Itou, H., Kovtunenko, V.A., Rajagopal, K.R.: Contacting crack faces within the context of bodies exhibiting limiting strains. JSIAM Lett. 9, 61\u201364 (2017)'), ('REFSTR', "{u'bibunstructured': u'Itou, H., Kovtunenko, V.A., Rajagopal, K.R.: Contacting crack faces within the context of bodies exhibiting limiting strains. JSIAM Lett. 9, 61\\u201364 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Itou', u'initials': u'H'}, {u'familyname': u'Kovtunenko', u'initials': u'VA'}, {u'familyname': u'Rajagopal', u'initials': u'KR'}], u'occurrence': [{u'handle': u'3705146', u'@type': u'AMSID'}, {u'handle': u'10.14495/jsiaml.9.61', u'@type': u'DOI'}], u'journaltitle': u'JSIAM Lett.', u'volumeid': u'9', u'firstpage': u'61', u'lastpage': u'64', u'year': u'2017', u'articletitle': {u'#text': u'Contacting crack faces within the context of bodies exhibiting limiting strains', u'@language': u'En'}}, u'citationnumber': u'8.', u'@id': u'CR8'}")],
[('AUTHOR_FIRST_NAME', u'B'), ('AUTHOR_LAST_NAME', u'Jadamba'), ('AUTHOR_FIRST_NAME', u'AA'), ('AUTHOR_LAST_NAME', u'Khan'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'Racitic'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'inverse'), ('TITLE', u'problem'), ('TITLE', u'of'), ('TITLE', u'identifying'), ('TITLE', u'Lam'), ('TITLE', u'coefficients'), ('TITLE', u'in'), ('TITLE', u'linear'), ('TITLE', u'elasticity'), ('JOURNAL', u'Comput.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Appl.'), ('VOLUME', u'56'), ('YEAR', u'2008'), ('PAGE', u'431'), ('DOI', u'10.1016/j.camwa.2007.12.016'), ('REFPLAINTEXT', u'Jadamba, B., Khan, A.A., Racitic, F.: On the inverse problem of identifying Lam\xe9 coefficients in linear elasticity. Comput. Math. Appl. 56, 431\u2013443 (2008)'), ('REFSTR', "{u'bibunstructured': u'Jadamba, B., Khan, A.A., Racitic, F.: On the inverse problem of identifying Lam\\xe9 coefficients in linear elasticity. Comput. Math. Appl. 56, 431\\u2013443 (2008)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Jadamba', u'initials': u'B'}, {u'familyname': u'Khan', u'initials': u'AA'}, {u'familyname': u'Racitic', u'initials': u'F'}], u'occurrence': [{u'handle': u'2442664', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.camwa.2007.12.016', u'@type': u'DOI'}], u'journaltitle': u'Comput. Math. Appl.', u'volumeid': u'56', u'firstpage': u'431', u'lastpage': u'443', u'year': u'2008', u'articletitle': {u'#text': u'On the inverse problem of identifying Lam\\xe9 coefficients in linear elasticity', u'@language': u'En'}}, u'citationnumber': u'9.', u'@id': u'CR9'}")],
[('AUTHOR_FIRST_NAME', u'AM'), ('AUTHOR_LAST_NAME', u'Khludnev'), ('AUTHOR_FIRST_NAME', u'VA'), ('AUTHOR_LAST_NAME', u'Kovtunenko'), ('YEAR', u'2000'), ('PUBLISHER', u'Analysis'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Cracks'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Solids'), ('REFPLAINTEXT', u'Khludnev, A.M., Kovtunenko, V.A.: Analysis of Cracks in Solids. WIT Press, Southampton (2000)'), ('REFSTR', "{u'bibunstructured': u'Khludnev, A.M., Kovtunenko, V.A.: Analysis of Cracks in Solids. WIT Press, Southampton (2000)', u'citationnumber': u'10.', u'@id': u'CR10', u'bibbook': {u'publisherlocation': u'Southampton', u'bibauthorname': [{u'familyname': u'Khludnev', u'initials': u'AM'}, {u'familyname': u'Kovtunenko', u'initials': u'VA'}], u'publishername': u'WIT Press', u'booktitle': u'Analysis of Cracks in Solids', u'year': u'2000'}}")],
[('AUTHOR_FIRST_NAME', u'AM'), ('AUTHOR_LAST_NAME', u'Khludnev'), ('YEAR', u'2010'), ('PUBLISHER', u'Elasticity'), ('PUBLISHER', u'Problems'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Non-'), ('PUBLISHER', u'smooth'), ('PUBLISHER', u'Domains'), ('REFPLAINTEXT', u'Khludnev, A.M.: Elasticity Problems in Non-smooth Domains. Fizmatlit, Moscow (2010)'), ('REFSTR', "{u'bibunstructured': u'Khludnev, A.M.: Elasticity Problems in Non-smooth Domains. Fizmatlit, Moscow (2010)', u'citationnumber': u'11.', u'@id': u'CR11', u'bibbook': {u'publisherlocation': u'Moscow', u'bibauthorname': {u'familyname': u'Khludnev', u'initials': u'AM'}, u'publishername': u'Fizmatlit', u'booktitle': u'Elasticity Problems in Non-smooth Domains', u'year': u'2010'}}")],
[('AUTHOR_FIRST_NAME', u'AM'), ('AUTHOR_LAST_NAME', u'Khludnev'), ('AUTHOR_FIRST_NAME', u'TS'), ('AUTHOR_LAST_NAME', u'Popova'), ('TITLE', u'Semirigid'), ('TITLE', u'inclusions'), ('TITLE', u'in'), ('TITLE', u'elastic'), ('TITLE', u'bodies:'), ('TITLE', u'mechanical'), ('TITLE', u'interplay'), ('TITLE', u'and'), ('TITLE', u'optimal'), ('TITLE', u'control'), ('JOURNAL', u'Comput.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Appl.'), ('VOLUME', u'77'), ('YEAR', u'2019'), ('PAGE', u'253'), ('DOI', u'10.1016/j.camwa.2018.09.030'), ('REFPLAINTEXT', u'Khludnev, A.M., Popova, T.S.: Semirigid inclusions in elastic bodies: mechanical interplay and optimal control. Comput. Math. Appl. 77, 253\u2013262 (2019)'), ('REFSTR', "{u'bibunstructured': u'Khludnev, A.M., Popova, T.S.: Semirigid inclusions in elastic bodies: mechanical interplay and optimal control. Comput. Math. Appl. 77, 253\\u2013262 (2019)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Khludnev', u'initials': u'AM'}, {u'familyname': u'Popova', u'initials': u'TS'}], u'occurrence': [{u'handle': u'3907414', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.camwa.2018.09.030', u'@type': u'DOI'}], u'journaltitle': u'Comput. Math. Appl.', u'volumeid': u'77', u'firstpage': u'253', u'lastpage': u'262', u'year': u'2019', u'articletitle': {u'#text': u'Semirigid inclusions in elastic bodies: mechanical interplay and optimal control', u'@language': u'En'}}, u'citationnumber': u'12.', u'@id': u'CR12'}")],
[('AUTHOR_FIRST_NAME', u'AM'), ('AUTHOR_LAST_NAME', u'Khludnev'), ('TITLE', u'Rigidity'), ('TITLE', u'parameter'), ('TITLE', u'identification'), ('TITLE', u'for'), ('TITLE', u'thin'), ('TITLE', u'inclusions'), ('TITLE', u'located'), ('TITLE', u'inside'), ('TITLE', u'elastic'), ('TITLE', u'bodies'), ('JOURNAL', u'J.'), ('JOURNAL', u'Opt.'), ('JOURNAL', u'Theory'), ('JOURNAL', u'Appl.'), ('VOLUME', u'172'), ('YEAR', u'2017'), ('PAGE', u'281'), ('DOI', u'10.1007/s10957-016-1025-8'), ('REFPLAINTEXT', u'Khludnev, A.M.: Rigidity parameter identification for thin inclusions located inside elastic bodies. J. Opt. Theory Appl. 172, 281\u2013297 (2017)'), ('REFSTR', "{u'bibunstructured': u'Khludnev, A.M.: Rigidity parameter identification for thin inclusions located inside elastic bodies. J. Opt. Theory Appl. 172, 281\\u2013297 (2017)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Khludnev', u'initials': u'AM'}, u'occurrence': [{u'handle': u'3596873', u'@type': u'AMSID'}, {u'handle': u'10.1007/s10957-016-1025-8', u'@type': u'DOI'}], u'journaltitle': u'J. Opt. Theory Appl.', u'volumeid': u'172', u'firstpage': u'281', u'lastpage': u'297', u'year': u'2017', u'articletitle': {u'#text': u'Rigidity parameter identification for thin inclusions located inside elastic bodies', u'@language': u'En'}}, u'citationnumber': u'13.', u'@id': u'CR13'}")],
[('AUTHOR_FIRST_NAME', u'AM'), ('AUTHOR_LAST_NAME', u'Khludnev'), ('TITLE', u'Equilibrium'), ('TITLE', u'of'), ('TITLE', u'an'), ('TITLE', u'elastic'), ('TITLE', u'body'), ('TITLE', u'with'), ('TITLE', u'closely'), ('TITLE', u'spaced'), ('TITLE', u'thin'), ('TITLE', u'inclusions'), ('JOURNAL', u'Comput.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'58'), ('YEAR', u'2018'), ('PAGE', u'1660'), ('DOI', u'10.1134/S096554251810007X'), ('REFPLAINTEXT', u'Khludnev, A.M.: Equilibrium of an elastic body with closely spaced thin inclusions. Comput. Math. Math. Phys. 58, 1660\u20131672 (2018)'), ('REFSTR', "{u'bibunstructured': u'Khludnev, A.M.: Equilibrium of an elastic body with closely spaced thin inclusions. Comput. Math. Math. Phys. 58, 1660\\u20131672 (2018)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Khludnev', u'initials': u'AM'}, u'occurrence': [{u'handle': u'3874046', u'@type': u'AMSID'}, {u'handle': u'10.1134/S096554251810007X', u'@type': u'DOI'}], u'journaltitle': u'Comput. Math. Math. Phys.', u'volumeid': u'58', u'firstpage': u'1660', u'lastpage': u'1672', u'year': u'2018', u'articletitle': {u'#text': u'Equilibrium of an elastic body with closely spaced thin inclusions', u'@language': u'En'}}, u'citationnumber': u'14.', u'@id': u'CR14'}")],
[('AUTHOR_FIRST_NAME', u'AM'), ('AUTHOR_LAST_NAME', u'Khludnev'), ('TITLE', u'Thin'), ('TITLE', u'inclusions'), ('TITLE', u'in'), ('TITLE', u'elastic'), ('TITLE', u'bodies'), ('TITLE', u'crossing'), ('TITLE', u'an'), ('TITLE', u'external'), ('TITLE', u'boundary'), ('JOURNAL', u'Z.'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('VOLUME', u'95'), ('YEAR', u'2015'), ('PAGE', u'1256'), ('DOI', u'10.1002/zamm.201400103'), ('REFPLAINTEXT', u'Khludnev, A.M.: Thin inclusions in elastic bodies crossing an external boundary. Z. Angew. Math. Mech. 95, 1256\u20131267 (2015)'), ('REFSTR', "{u'bibunstructured': u'Khludnev, A.M.: Thin inclusions in elastic bodies crossing an external boundary. Z. Angew. Math. Mech. 95, 1256\\u20131267 (2015)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Khludnev', u'initials': u'AM'}, u'occurrence': [{u'handle': u'3424462', u'@type': u'AMSID'}, {u'handle': u'10.1002/zamm.201400103', u'@type': u'DOI'}], u'journaltitle': u'Z. Angew. Math. Mech.', u'volumeid': u'95', u'firstpage': u'1256', u'lastpage': u'1267', u'year': u'2015', u'articletitle': {u'#text': u'Thin inclusions in elastic bodies crossing an external boundary', u'@language': u'En'}}, u'citationnumber': u'15.', u'@id': u'CR15'}")],
[('AUTHOR_FIRST_NAME', u'AM'), ('AUTHOR_LAST_NAME', u'Khludnev'), ('AUTHOR_FIRST_NAME', u'TS'), ('AUTHOR_LAST_NAME', u'Popova'), ('TITLE', u'Timoshenko'), ('TITLE', u'inclusions'), ('TITLE', u'in'), ('TITLE', u'elastic'), ('TITLE', u'bodies'), ('TITLE', u'crossing'), ('TITLE', u'an'), ('TITLE', u'external'), ('TITLE', u'boundary'), ('TITLE', u'at'), ('TITLE', u'zero'), ('TITLE', u'angle'), ('JOURNAL', u'Acta'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Solida'), ('JOURNAL', u'Sin.'), ('VOLUME', u'30'), ('YEAR', u'2017'), ('PAGE', u'327'), ('REFPLAINTEXT', u'Khludnev, A.M., Popova, T.S.: Timoshenko inclusions in elastic bodies crossing an external boundary at zero angle. Acta Mech. Solida Sin. 30, 327\u2013333 (2017)'), ('REFSTR', "{u'bibunstructured': u'Khludnev, A.M., Popova, T.S.: Timoshenko inclusions in elastic bodies crossing an external boundary at zero angle. Acta Mech. Solida Sin. 30, 327\\u2013333 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Khludnev', u'initials': u'AM'}, {u'familyname': u'Popova', u'initials': u'TS'}], u'occurrence': {u'handle': u'10.1016/j.camss.2017.05.005', u'@type': u'DOI'}, u'journaltitle': u'Acta Mech. Solida Sin.', u'volumeid': u'30', u'firstpage': u'327', u'lastpage': u'333', u'year': u'2017', u'articletitle': {u'#text': u'Timoshenko inclusions in elastic bodies crossing an external boundary at zero angle', u'@language': u'En'}}, u'citationnumber': u'16.', u'@id': u'CR16'}")],
[('AUTHOR_FIRST_NAME', u'AM'), ('AUTHOR_LAST_NAME', u'Khludnev'), ('TITLE', u'On'), ('TITLE', u'thin'), ('TITLE', u'inclusions'), ('TITLE', u'in'), ('TITLE', u'elastic'), ('TITLE', u'bodies'), ('TITLE', u'with'), ('TITLE', u'defects'), ('JOURNAL', u'Z.'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'70'), ('YEAR', u'2019'), ('PAGE', u'45'), ('DOI', u'10.1007/s00033-019-1091-5'), ('REFPLAINTEXT', u'Khludnev, A.M.: On thin inclusions in elastic bodies with defects. Z. Angew. Math. Phys. 70, 45 (2019)'), ('REFSTR', "{u'bibunstructured': u'Khludnev, A.M.: On thin inclusions in elastic bodies with defects. Z. Angew. Math. Phys. 70, 45 (2019)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Khludnev', u'initials': u'AM'}, u'occurrence': [{u'handle': u'3914948', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-019-1091-5', u'@type': u'DOI'}], u'journaltitle': u'Z. Angew. Math. Phys.', u'volumeid': u'70', u'firstpage': u'45', u'year': u'2019', u'articletitle': {u'#text': u'On thin inclusions in elastic bodies with defects', u'@language': u'En'}}, u'citationnumber': u'17.', u'@id': u'CR17'}")],
[('AUTHOR_FIRST_NAME', u'D'), ('AUTHOR_LAST_NAME', u'Knees'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Schroder'), ('TITLE', u'Global'), ('TITLE', u'spatial'), ('TITLE', u'regularity'), ('TITLE', u'for'), ('TITLE', u'elasticity'), ('TITLE', u'models'), ('TITLE', u'with'), ('TITLE', u'cracks,'), ('TITLE', u'contact'), ('TITLE', u'and'), ('TITLE', u'other'), ('TITLE', u'nonsmooth'), ('TITLE', u'constraints'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Methods'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'35'), ('YEAR', u'2012'), ('PAGE', u'1859'), ('DOI', u'10.1002/mma.2598'), ('REFPLAINTEXT', u'Knees, D., Schroder, A.: Global spatial regularity for elasticity models with cracks, contact and other nonsmooth constraints. Math. Methods Appl. Sci. 35, 1859\u20131884 (2012)'), ('REFSTR', "{u'bibunstructured': u'Knees, D., Schroder, A.: Global spatial regularity for elasticity models with cracks, contact and other nonsmooth constraints. Math. Methods Appl. Sci. 35, 1859\\u20131884 (2012)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Knees', u'initials': u'D'}, {u'familyname': u'Schroder', u'initials': u'A'}], u'occurrence': [{u'handle': u'2982470', u'@type': u'AMSID'}, {u'handle': u'10.1002/mma.2598', u'@type': u'DOI'}], u'journaltitle': u'Math. Methods Appl. Sci.', u'volumeid': u'35', u'firstpage': u'1859', u'lastpage': u'1884', u'year': u'2012', u'articletitle': {u'#text': u'Global spatial regularity for elasticity models with cracks, contact and other nonsmooth constraints', u'@language': u'En'}}, u'citationnumber': u'18.', u'@id': u'CR18'}")],
[('AUTHOR_FIRST_NAME', u'I'), ('AUTHOR_LAST_NAME', u'Knowles'), ('TITLE', u'Parameter'), ('TITLE', u'identification'), ('TITLE', u'for'), ('TITLE', u'elliptic'), ('TITLE', u'problems'), ('JOURNAL', u'J.'), ('JOURNAL', u'Comput.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'131'), ('YEAR', u'2001'), ('PAGE', u'175'), ('DOI', u'10.1016/S0377-0427(00)00275-2'), ('REFPLAINTEXT', u'Knowles, I.: Parameter identification for elliptic problems. J. Comput. Appl. Math. 131, 175\u2013194 (2001)'), ('REFSTR', "{u'bibunstructured': u'Knowles, I.: Parameter identification for elliptic problems. J. Comput. Appl. Math. 131, 175\\u2013194 (2001)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Knowles', u'initials': u'I'}, u'occurrence': [{u'handle': u'1835711', u'@type': u'AMSID'}, {u'handle': u'10.1016/S0377-0427(00)00275-2', u'@type': u'DOI'}], u'journaltitle': u'J. Comput. Appl. Math.', u'volumeid': u'131', u'firstpage': u'175', u'lastpage': u'194', u'year': u'2001', u'articletitle': {u'#text': u'Parameter identification for elliptic problems', u'@language': u'En'}}, u'citationnumber': u'19.', u'@id': u'CR19'}")],
[('AUTHOR_FIRST_NAME', u'VA'), ('AUTHOR_LAST_NAME', u'Kovtunenko'), ('TITLE', u'Primal-'), ('TITLE', u'dual'), ('TITLE', u'methods'), ('TITLE', u'of'), ('TITLE', u'shape'), ('TITLE', u'sensitivity'), ('TITLE', u'analysis'), ('TITLE', u'for'), ('TITLE', u'curvilinear'), ('TITLE', u'cracks'), ('TITLE', u'with'), ('TITLE', u'nonpenetration'), ('JOURNAL', u'IMA'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'71'), ('YEAR', u'2006'), ('PAGE', u'635'), ('DOI', u'10.1093/imamat/hxl014'), ('REFPLAINTEXT', u'Kovtunenko, V.A.: Primal-dual methods of shape sensitivity analysis for curvilinear cracks with nonpenetration. IMA J. Appl. Math. 71, 635\u2013657 (2006)'), ('REFSTR', "{u'bibunstructured': u'Kovtunenko, V.A.: Primal-dual methods of shape sensitivity analysis for curvilinear cracks with nonpenetration. IMA J. Appl. Math. 71, 635\\u2013657 (2006)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Kovtunenko', u'initials': u'VA'}, u'occurrence': [{u'handle': u'2268880', u'@type': u'AMSID'}, {u'handle': u'10.1093/imamat/hxl014', u'@type': u'DOI'}], u'journaltitle': u'IMA J. Appl. Math.', u'volumeid': u'71', u'firstpage': u'635', u'lastpage': u'657', u'year': u'2006', u'articletitle': {u'#text': u'Primal-dual methods of shape sensitivity analysis for curvilinear cracks with nonpenetration', u'@language': u'En'}}, u'citationnumber': u'20.', u'@id': u'CR20'}")],
[('AUTHOR_FIRST_NAME', u'VA'), ('AUTHOR_LAST_NAME', u'Kozlov'), ('AUTHOR_FIRST_NAME', u'VG'), ('AUTHOR_LAST_NAME', u'Mazya'), ('AUTHOR_FIRST_NAME', u'AB'), ('AUTHOR_LAST_NAME', u'Movchan'), ('YEAR', u'1999'), ('PUBLISHER', u'Asymptotic'), ('PUBLISHER', u'Analysis'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Fields'), ('PUBLISHER', u'in'), ('PUBLISHER', u'a'), ('PUBLISHER', u'Multi-'), ('PUBLISHER', u'structure.'), ('PUBLISHER', u'Oxford'), ('PUBLISHER', u'Mathematical'), ('PUBLISHER', u'Monographs'), ('REFPLAINTEXT', u'Kozlov, V.A., Mazya, V.G., Movchan, A.B.: Asymptotic Analysis of Fields in a Multi-structure. Oxford Mathematical Monographs. Oxford University Press, New York (1999)'), ('REFSTR', "{u'bibunstructured': u'Kozlov, V.A., Mazya, V.G., Movchan, A.B.: Asymptotic Analysis of Fields in a Multi-structure. Oxford Mathematical Monographs. Oxford University Press, New York (1999)', u'citationnumber': u'21.', u'@id': u'CR21', u'bibbook': {u'publisherlocation': u'New York', u'bibauthorname': [{u'familyname': u'Kozlov', u'initials': u'VA'}, {u'familyname': u'Mazya', u'initials': u'VG'}, {u'familyname': u'Movchan', u'initials': u'AB'}], u'publishername': u'Oxford University Press', u'booktitle': u'Asymptotic Analysis of Fields in a Multi-structure. Oxford Mathematical Monographs', u'year': u'1999'}}")],
[('AUTHOR_FIRST_NAME', u'NP'), ('AUTHOR_LAST_NAME', u'Lazarev'), ('TITLE', u'Shape'), ('TITLE', u'sensitivity'), ('TITLE', u'analysis'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'energy'), ('TITLE', u'integrals'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'Timoshenko-'), ('TITLE', u'type'), ('TITLE', u'plate'), ('TITLE', u'containing'), ('TITLE', u'a'), ('TITLE', u'crack'), ('TITLE', u'on'), ('TITLE', u'the'), ('TITLE', u'boundary'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'rigid'), ('TITLE', u'inclusion'), ('JOURNAL', u'Z.'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'66'), ('YEAR', u'2015'), ('PAGE', u'2025'), ('DOI', u'10.1007/s00033-014-0488-4'), ('REFPLAINTEXT', u'Lazarev, N.P.: Shape sensitivity analysis of the energy integrals for the Timoshenko-type plate containing a crack on the boundary of a rigid inclusion. Z. Angew. Math. Phys. 66, 2025\u20132040 (2015)'), ('REFSTR', "{u'bibunstructured': u'Lazarev, N.P.: Shape sensitivity analysis of the energy integrals for the Timoshenko-type plate containing a crack on the boundary of a rigid inclusion. Z. Angew. Math. Phys. 66, 2025\\u20132040 (2015)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Lazarev', u'initials': u'NP'}, u'occurrence': [{u'handle': u'3377729', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-014-0488-4', u'@type': u'DOI'}], u'journaltitle': u'Z. Angew. Math. Phys.', u'volumeid': u'66', u'firstpage': u'2025', u'lastpage': u'2040', u'year': u'2015', u'articletitle': {u'#text': u'Shape sensitivity analysis of the energy integrals for the Timoshenko-type plate containing a crack on the boundary of a rigid inclusion', u'@language': u'En'}}, u'citationnumber': u'22.', u'@id': u'CR22'}")],
[('AUTHOR_FIRST_NAME', u'NP'), ('AUTHOR_LAST_NAME', u'Lazarev'), ('AUTHOR_FIRST_NAME', u'EM'), ('AUTHOR_LAST_NAME', u'Rudoy'), ('TITLE', u'Shape'), ('TITLE', u'sensitivity'), ('TITLE', u'analysis'), ('TITLE', u'of'), ('TITLE', u'Timoshenkos'), ('TITLE', u'plate'), ('TITLE', u'with'), ('TITLE', u'a'), ('TITLE', u'crack'), ('TITLE', u'under'), ('TITLE', u'the'), ('TITLE', u'nonpenetration'), ('TITLE', u'condition'), ('JOURNAL', u'Z.'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('VOLUME', u'94'), ('YEAR', u'2014'), ('PAGE', u'730'), ('DOI', u'10.1002/zamm.201200229'), ('REFPLAINTEXT', u'Lazarev, N.P., Rudoy, E.M.: Shape sensitivity analysis of Timoshenko\u2019s plate with a crack under the nonpenetration condition. Z. Angew. Math. Mech. 94, 730\u2013739 (2014)'), ('REFSTR', "{u'bibunstructured': u'Lazarev, N.P., Rudoy, E.M.: Shape sensitivity analysis of Timoshenko\\u2019s plate with a crack under the nonpenetration condition. Z. Angew. Math. Mech. 94, 730\\u2013739 (2014)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Lazarev', u'initials': u'NP'}, {u'familyname': u'Rudoy', u'initials': u'EM'}], u'occurrence': [{u'handle': u'3259385', u'@type': u'AMSID'}, {u'handle': u'10.1002/zamm.201200229', u'@type': u'DOI'}], u'journaltitle': u'Z. Angew. Math. Mech.', u'volumeid': u'94', u'firstpage': u'730', u'lastpage': u'739', u'year': u'2014', u'articletitle': {u'#text': u'Shape sensitivity analysis of Timoshenko\\u2019s plate with a crack under the nonpenetration condition', u'@language': u'En'}}, u'citationnumber': u'23.', u'@id': u'CR23'}")],
[('AUTHOR_FIRST_NAME', u'NP'), ('AUTHOR_LAST_NAME', u'Lazarev'), ('AUTHOR_FIRST_NAME', u'EM'), ('AUTHOR_LAST_NAME', u'Rudoy'), ('TITLE', u'Optimal'), ('TITLE', u'size'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'rigid'), ('TITLE', u'thin'), ('TITLE', u'stiffener'), ('TITLE', u'reinforcing'), ('TITLE', u'an'), ('TITLE', u'elastic'), ('TITLE', u'plate'), ('TITLE', u'on'), ('TITLE', u'the'), ('TITLE', u'outer'), ('TITLE', u'edge'), ('JOURNAL', u'Z.'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('VOLUME', u'97'), ('YEAR', u'2017'), ('PAGE', u'716'), ('DOI', u'10.1002/zamm.201600291'), ('REFPLAINTEXT', u'Lazarev, N.P., Rudoy, E.M.: Optimal size of a rigid thin stiffener reinforcing an elastic plate on the outer edge. Z. Angew. Math. Mech. 97, 716\u2013730 (2017)'), ('REFSTR', "{u'bibunstructured': u'Lazarev, N.P., Rudoy, E.M.: Optimal size of a rigid thin stiffener reinforcing an elastic plate on the outer edge. Z. Angew. Math. Mech. 97, 716\\u2013730 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Lazarev', u'initials': u'NP'}, {u'familyname': u'Rudoy', u'initials': u'EM'}], u'occurrence': [{u'handle': u'3689455', u'@type': u'AMSID'}, {u'handle': u'10.1002/zamm.201600291', u'@type': u'DOI'}], u'journaltitle': u'Z. Angew. Math. Mech.', u'volumeid': u'97', u'firstpage': u'716', u'lastpage': u'730', u'year': u'2017', u'articletitle': {u'#text': u'Optimal size of a rigid thin stiffener reinforcing an elastic plate on the outer edge', u'@language': u'En'}}, u'citationnumber': u'24.', u'@id': u'CR24'}")],
[('AUTHOR_FIRST_NAME', u'PK'), ('AUTHOR_LAST_NAME', u'Mallick'), ('YEAR', u'1993'), ('PUBLISHER', u'Fiber-'), ('PUBLISHER', u'Reinforced'), ('PUBLISHER', u'Composites.'), ('PUBLISHER', u'Materials,'), ('PUBLISHER', u'Manufacturing,'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Design'), ('REFPLAINTEXT', u'Mallick, P.K.: Fiber-Reinforced Composites. Materials, Manufacturing, and Design. Marcel Dekker, New York (1993)'), ('REFSTR', "{u'bibunstructured': u'Mallick, P.K.: Fiber-Reinforced Composites. Materials, Manufacturing, and Design. Marcel Dekker, New York (1993)', u'citationnumber': u'25.', u'@id': u'CR25', u'bibbook': {u'publisherlocation': u'New York', u'bibauthorname': {u'familyname': u'Mallick', u'initials': u'PK'}, u'publishername': u'Marcel Dekker', u'booktitle': u'Fiber-Reinforced Composites. Materials, Manufacturing, and Design', u'year': u'1993'}}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Nakamura'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Uhlmann'), ('TITLE', u'Identification'), ('TITLE', u'of'), ('TITLE', u'Lame'), ('TITLE', u'parameters'), ('TITLE', u'by'), ('TITLE', u'boundary'), ('TITLE', u'measurements'), ('JOURNAL', u'Am.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Math.'), ('VOLUME', u'115'), ('YEAR', u'1993'), ('PAGE', u'1161'), ('DOI', u'10.2307/2375069'), ('REFPLAINTEXT', u'Nakamura, G., Uhlmann, G.: Identification of Lame parameters by boundary measurements. Am. J. Math. 115, 1161\u20131187 (1993)'), ('REFSTR', "{u'bibunstructured': u'Nakamura, G., Uhlmann, G.: Identification of Lame parameters by boundary measurements. Am. J. Math. 115, 1161\\u20131187 (1993)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Nakamura', u'initials': u'G'}, {u'familyname': u'Uhlmann', u'initials': u'G'}], u'occurrence': [{u'handle': u'1246188', u'@type': u'AMSID'}, {u'handle': u'10.2307/2375069', u'@type': u'DOI'}], u'journaltitle': u'Am. J. Math.', u'volumeid': u'115', u'firstpage': u'1161', u'lastpage': u'1187', u'year': u'1993', u'articletitle': {u'#text': u'Identification of Lame parameters by boundary measurements', u'@language': u'En'}}, u'citationnumber': u'26.', u'@id': u'CR26'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Nakamura'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Uhlmann'), ('TITLE', u'Global'), ('TITLE', u'uniqueness'), ('TITLE', u'for'), ('TITLE', u'an'), ('TITLE', u'inverse'), ('TITLE', u'boundary'), ('TITLE', u'value'), ('TITLE', u'problem'), ('TITLE', u'arising'), ('TITLE', u'in'), ('TITLE', u'elasticity'), ('JOURNAL', u'Invent.'), ('JOURNAL', u'Math.'), ('VOLUME', u'118'), ('YEAR', u'1994'), ('PAGE', u'457'), ('DOI', u'10.1007/BF01231541'), ('REFPLAINTEXT', u'Nakamura, G., Uhlmann, G.: Global uniqueness for an inverse boundary value problem arising in elasticity. Invent. Math. 118, 457\u2013474 (1994)'), ('REFSTR', "{u'bibunstructured': u'Nakamura, G., Uhlmann, G.: Global uniqueness for an inverse boundary value problem arising in elasticity. Invent. Math. 118, 457\\u2013474 (1994)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Nakamura', u'initials': u'G'}, {u'familyname': u'Uhlmann', u'initials': u'G'}], u'occurrence': [{u'handle': u'1296354', u'@type': u'AMSID'}, {u'handle': u'10.1007/BF01231541', u'@type': u'DOI'}], u'journaltitle': u'Invent. Math.', u'volumeid': u'118', u'firstpage': u'457', u'lastpage': u'474', u'year': u'1994', u'articletitle': {u'#text': u'Global uniqueness for an inverse boundary value problem arising in elasticity', u'@language': u'En'}}, u'citationnumber': u'27.', u'@id': u'CR27'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Panasenko'), ('YEAR', u'2005'), ('PUBLISHER', u'Multi-'), ('PUBLISHER', u'scale'), ('PUBLISHER', u'Modelling'), ('PUBLISHER', u'for'), ('PUBLISHER', u'Structures'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Composites'), ('REFPLAINTEXT', u'Panasenko, G.: Multi-scale Modelling for Structures and Composites. Springer, New York (2005)'), ('REFSTR', "{u'bibunstructured': u'Panasenko, G.: Multi-scale Modelling for Structures and Composites. Springer, New York (2005)', u'citationnumber': u'28.', u'@id': u'CR28', u'bibbook': {u'bibauthorname': {u'familyname': u'Panasenko', u'initials': u'G'}, u'publisherlocation': u'New York', u'occurrence': {u'handle': u'1078.74002', u'@type': u'ZLBID'}, u'booktitle': u'Multi-scale Modelling for Structures and Composites', u'year': u'2005', u'publishername': u'Springer'}}")],
[('AUTHOR_FIRST_NAME', u'IM'), ('AUTHOR_LAST_NAME', u'Pasternak'), ('TITLE', u'Plane'), ('TITLE', u'problem'), ('TITLE', u'of'), ('TITLE', u'elasticity'), ('TITLE', u'theory'), ('TITLE', u'for'), ('TITLE', u'anisotropic'), ('TITLE', u'bodies'), ('TITLE', u'with'), ('TITLE', u'thin'), ('TITLE', u'elastic'), ('TITLE', u'inclusions'), ('JOURNAL', u'J.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'186'), ('YEAR', u'2012'), ('PAGE', u'31'), ('DOI', u'10.1007/s10958-012-0971-4'), ('REFPLAINTEXT', u'Pasternak, I.M.: Plane problem of elasticity theory for anisotropic bodies with thin elastic inclusions. J. Math. Sci. 186, 31\u201347 (2012)'), ('REFSTR', "{u'bibunstructured': u'Pasternak, I.M.: Plane problem of elasticity theory for anisotropic bodies with thin elastic inclusions. J. Math. Sci. 186, 31\\u201347 (2012)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Pasternak', u'initials': u'IM'}, u'occurrence': [{u'handle': u'2933721', u'@type': u'AMSID'}, {u'handle': u'10.1007/s10958-012-0971-4', u'@type': u'DOI'}], u'journaltitle': u'J. Math. Sci.', u'volumeid': u'186', u'firstpage': u'31', u'lastpage': u'47', u'year': u'2012', u'articletitle': {u'#text': u'Plane problem of elasticity theory for anisotropic bodies with thin elastic inclusions', u'@language': u'En'}}, u'citationnumber': u'29.', u'@id': u'CR29'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Sofonea'), ('AUTHOR_FIRST_NAME', u'Y-B'), ('AUTHOR_LAST_NAME', u'Xiao'), ('TITLE', u'Boundary'), ('TITLE', u'optimal'), ('TITLE', u'control'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'nonsmooth'), ('TITLE', u'frictionless'), ('TITLE', u'contact'), ('TITLE', u'problem'), ('JOURNAL', u'Comput.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Appl.'), ('VOLUME', u'78'), ('YEAR', u'2019'), ('PAGE', u'152'), ('DOI', u'10.1016/j.camwa.2019.02.027'), ('REFPLAINTEXT', u'Sofonea, M., Xiao, Y.-B.: Boundary optimal control of a nonsmooth frictionless contact problem. Comput. Math. Appl. 78, 152\u2013165 (2019)'), ('REFSTR', "{u'bibunstructured': u'Sofonea, M., Xiao, Y.-B.: Boundary optimal control of a nonsmooth frictionless contact problem. Comput. Math. Appl. 78, 152\\u2013165 (2019)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Sofonea', u'initials': u'M'}, {u'familyname': u'Xiao', u'initials': u'Y-B'}], u'occurrence': [{u'handle': u'3949682', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.camwa.2019.02.027', u'@type': u'DOI'}], u'journaltitle': u'Comput. Math. Appl.', u'volumeid': u'78', u'firstpage': u'152', u'lastpage': u'165', u'year': u'2019', u'articletitle': {u'#text': u'Boundary optimal control of a nonsmooth frictionless contact problem', u'@language': u'En'}}, u'citationnumber': u'30.', u'@id': u'CR30'}")],
[('AUTHOR_FIRST_NAME', u'VV'), ('AUTHOR_LAST_NAME', u'Shcherbakov'), ('TITLE', u'Choosing'), ('TITLE', u'an'), ('TITLE', u'optimal'), ('TITLE', u'shape'), ('TITLE', u'of'), ('TITLE', u'thin'), ('TITLE', u'rigid'), ('TITLE', u'inclusions'), ('TITLE', u'in'), ('TITLE', u'elastic'), ('TITLE', u'bodies'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Tech.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'56'), ('YEAR', u'2015'), ('PAGE', u'321'), ('DOI', u'10.1134/S0021894415020182'), ('REFPLAINTEXT', u'Shcherbakov, V.V.: Choosing an optimal shape of thin rigid inclusions in elastic bodies. J. Appl. Mech. Tech. Phys. 56, 321\u2013329 (2015)'), ('REFSTR', "{u'bibunstructured': u'Shcherbakov, V.V.: Choosing an optimal shape of thin rigid inclusions in elastic bodies. J. Appl. Mech. Tech. Phys. 56, 321\\u2013329 (2015)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Shcherbakov', u'initials': u'VV'}, u'occurrence': [{u'handle': u'3416031', u'@type': u'AMSID'}, {u'handle': u'10.1134/S0021894415020182', u'@type': u'DOI'}], u'journaltitle': u'J. Appl. Mech. Tech. Phys.', u'volumeid': u'56', u'firstpage': u'321', u'lastpage': u'329', u'year': u'2015', u'articletitle': {u'#text': u'Choosing an optimal shape of thin rigid inclusions in elastic bodies', u'@language': u'En'}}, u'citationnumber': u'31.', u'@id': u'CR31'}")],
[('AUTHOR_FIRST_NAME', u'VV'), ('AUTHOR_LAST_NAME', u'Shcherbakov'), ('TITLE', u'Energy'), ('TITLE', u'release'), ('TITLE', u'rates'), ('TITLE', u'for'), ('TITLE', u'interfacial'), ('TITLE', u'cracks'), ('TITLE', u'in'), ('TITLE', u'elastic'), ('TITLE', u'bodies'), ('TITLE', u'with'), ('TITLE', u'thin'), ('TITLE', u'semirigid'), ('TITLE', u'inclusions'), ('JOURNAL', u'Z.'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'68'), ('YEAR', u'2017'), ('PAGE', u'26'), ('DOI', u'10.1007/s00033-017-0769-9'), ('REFPLAINTEXT', u'Shcherbakov, V.V.: Energy release rates for interfacial cracks in elastic bodies with thin semirigid inclusions. Z. Angew. Math. Phys. 68, 26 (2017)'), ('REFSTR', "{u'bibunstructured': u'Shcherbakov, V.V.: Energy release rates for interfacial cracks in elastic bodies with thin semirigid inclusions. Z. Angew. Math. Phys. 68, 26 (2017)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Shcherbakov', u'initials': u'VV'}, u'occurrence': [{u'handle': u'3598792', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-017-0769-9', u'@type': u'DOI'}], u'journaltitle': u'Z. Angew. Math. Phys.', u'volumeid': u'68', u'firstpage': u'26', u'year': u'2017', u'articletitle': {u'#text': u'Energy release rates for interfacial cracks in elastic bodies with thin semirigid inclusions', u'@language': u'En'}}, u'citationnumber': u'32.', u'@id': u'CR32'}")],
[('AUTHOR_FIRST_NAME', u'BJ'), ('AUTHOR_LAST_NAME', u'Chen'), ('AUTHOR_FIRST_NAME', u'ZM'), ('AUTHOR_LAST_NAME', u'Xiao'), ('AUTHOR_FIRST_NAME', u'KM'), ('AUTHOR_LAST_NAME', u'Liew'), ('TITLE', u'Electroelastic'), ('TITLE', u'stress'), ('TITLE', u'analysis'), ('TITLE', u'for'), ('TITLE', u'a'), ('TITLE', u'wedge-'), ('TITLE', u'shaped'), ('TITLE', u'crack'), ('TITLE', u'interacting'), ('TITLE', u'with'), ('TITLE', u'a'), ('TITLE', u'screw'), ('TITLE', u'dislocation'), ('TITLE', u'in'), ('TITLE', u'piezoelectric'), ('TITLE', u'solid'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Eng.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'40'), ('YEAR', u'2002'), ('PAGE', u'621'), ('REFPLAINTEXT', u'Chen, B.J., Xiao, Z.M., Liew, K.M.: Electro\u2013elastic stress analysis for a wedge-shaped crack interacting with a screw dislocation in piezoelectric solid. Int. J. Eng. Sci. 40, 621\u2013635 (2002)'), ('REFSTR', "{u'bibunstructured': u'Chen, B.J., Xiao, Z.M., Liew, K.M.: Electro\\u2013elastic stress analysis for a wedge-shaped crack interacting with a screw dislocation in piezoelectric solid. Int. J. Eng. Sci. 40, 621\\u2013635 (2002)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Chen', u'initials': u'BJ'}, {u'familyname': u'Xiao', u'initials': u'ZM'}, {u'familyname': u'Liew', u'initials': u'KM'}], u'occurrence': {u'handle': u'10.1016/S0020-7225(01)00093-3', u'@type': u'DOI'}, u'journaltitle': u'Int. J. Eng. Sci.', u'volumeid': u'40', u'firstpage': u'621', u'lastpage': u'635', u'year': u'2002', u'articletitle': {u'#text': u'Electro\\u2013elastic stress analysis for a wedge-shaped crack interacting with a screw dislocation in piezoelectric solid', u'@outputmedium': u'All', u'@language': u'En'}}, u'citationnumber': u'1.', u'@id': u'CR1'}")],
[('AUTHOR_FIRST_NAME', u'BJ'), ('AUTHOR_LAST_NAME', u'Chen'), ('AUTHOR_FIRST_NAME', u'ZM'), ('AUTHOR_LAST_NAME', u'Xiao'), ('AUTHOR_FIRST_NAME', u'KM'), ('AUTHOR_LAST_NAME', u'Liew'), ('TITLE', u'A'), ('TITLE', u'line'), ('TITLE', u'dislocation'), ('TITLE', u'interacting'), ('TITLE', u'with'), ('TITLE', u'a'), ('TITLE', u'semi-'), ('TITLE', u'infinite'), ('TITLE', u'crack'), ('TITLE', u'in'), ('TITLE', u'piezoelectric'), ('TITLE', u'solid'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Eng.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'42'), ('YEAR', u'2004'), ('PAGE', u'1'), ('REFPLAINTEXT', u'Chen, B.J., Xiao, Z.M., Liew, K.M.: A line dislocation interacting with a semi-infinite crack in piezoelectric solid. Int. J. Eng. Sci. 42, 1\u201311 (2004)'), ('REFSTR', "{u'bibunstructured': u'Chen, B.J., Xiao, Z.M., Liew, K.M.: A line dislocation interacting with a semi-infinite crack in piezoelectric solid. Int. J. Eng. Sci. 42, 1\\u201311 (2004)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Chen', u'initials': u'BJ'}, {u'familyname': u'Xiao', u'initials': u'ZM'}, {u'familyname': u'Liew', u'initials': u'KM'}], u'occurrence': {u'handle': u'10.1016/S0020-7225(03)00279-9', u'@type': u'DOI'}, u'journaltitle': u'Int. J. Eng. Sci.', u'volumeid': u'42', u'firstpage': u'1', u'lastpage': u'11', u'year': u'2004', u'articletitle': {u'#text': u'A line dislocation interacting with a semi-infinite crack in piezoelectric solid', u'@language': u'En'}}, u'citationnumber': u'2.', u'@id': u'CR2'}")],
[('REFPLAINTEXT', u'Deeg, W.F.: The Analysis of Dislocation, Crack, and Inclusion Problems in Piezoelectric Solids. Ph.D. thesis, Stanford University, Stanford, CA (1980)'), ('REFSTR', "{u'bibunstructured': u'Deeg, W.F.: The Analysis of Dislocation, Crack, and Inclusion Problems in Piezoelectric Solids. Ph.D. thesis, Stanford University, Stanford, CA (1980)', u'citationnumber': u'3.', u'@id': u'CR3'}")],
[('AUTHOR_FIRST_NAME', u'KY'), ('AUTHOR_LAST_NAME', u'Lee'), ('AUTHOR_FIRST_NAME', u'WG'), ('AUTHOR_LAST_NAME', u'Lee'), ('AUTHOR_FIRST_NAME', u'YE'), ('AUTHOR_LAST_NAME', u'Pak'), ('TITLE', u'Interaction'), ('TITLE', u'between'), ('TITLE', u'a'), ('TITLE', u'semi-'), ('TITLE', u'infinite'), ('TITLE', u'crack'), ('TITLE', u'and'), ('TITLE', u'a'), ('TITLE', u'screw'), ('TITLE', u'dislocation'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'piezoelectric'), ('TITLE', u'material'), ('JOURNAL', u'ASME'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Mech.'), ('VOLUME', u'67'), ('YEAR', u'2000'), ('PAGE', u'165'), ('REFPLAINTEXT', u'Lee, K.Y., Lee, W.G., Pak, Y.E.: Interaction between a semi-infinite crack and a screw dislocation in a piezoelectric material. ASME J. Appl. Mech. 67, 165\u2013170 (2000)'), ('REFSTR', "{u'bibunstructured': u'Lee, K.Y., Lee, W.G., Pak, Y.E.: Interaction between a semi-infinite crack and a screw dislocation in a piezoelectric material. ASME J. Appl. Mech. 67, 165\\u2013170 (2000)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Lee', u'initials': u'KY'}, {u'familyname': u'Lee', u'initials': u'WG'}, {u'familyname': u'Pak', u'initials': u'YE'}], u'occurrence': {u'handle': u'10.1115/1.321172', u'@type': u'DOI'}, u'journaltitle': u'ASME J. Appl. Mech.', u'volumeid': u'67', u'firstpage': u'165', u'lastpage': u'170', u'year': u'2000', u'articletitle': {u'#text': u'Interaction between a semi-infinite crack and a screw dislocation in a piezoelectric material', u'@language': u'En'}}, u'citationnumber': u'4.', u'@id': u'CR4'}")],
[('AUTHOR_FIRST_NAME', u'CY'), ('AUTHOR_LAST_NAME', u'Li'), ('AUTHOR_FIRST_NAME', u'GJ'), ('AUTHOR_LAST_NAME', u'Weng'), ('TITLE', u'Yoffe-'), ('TITLE', u'type'), ('TITLE', u'moving'), ('TITLE', u'crack'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'functionally'), ('TITLE', u'graded'), ('TITLE', u'piezoelectric'), ('TITLE', u'material'), ('JOURNAL', u'Proc.'), ('JOURNAL', u'R.'), ('JOURNAL', u'Soc.'), ('JOURNAL', u'Lond.'), ('JOURNAL', u'A'), ('VOLUME', u'458'), ('YEAR', u'2002'), ('PAGE', u'381'), ('DOI', u'10.1098/rspa.2001.0873'), ('REFPLAINTEXT', u'Li, C.Y., Weng, G.J.: Yoffe-type moving crack in a functionally graded piezoelectric material. Proc. R. Soc. Lond. A 458, 381\u2013399 (2002)'), ('REFSTR', "{u'bibunstructured': u'Li, C.Y., Weng, G.J.: Yoffe-type moving crack in a functionally graded piezoelectric material. Proc. R. Soc. Lond. A 458, 381\\u2013399 (2002)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Li', u'initials': u'CY'}, {u'familyname': u'Weng', u'initials': u'GJ'}], u'occurrence': [{u'handle': u'1889934', u'@type': u'AMSID'}, {u'handle': u'10.1098/rspa.2001.0873', u'@type': u'DOI'}], u'journaltitle': u'Proc. R. Soc. Lond. A', u'volumeid': u'458', u'firstpage': u'381', u'lastpage': u'399', u'year': u'2002', u'articletitle': {u'#text': u'Yoffe-type moving crack in a functionally graded piezoelectric material', u'@language': u'En'}}, u'citationnumber': u'5.', u'@id': u'CR5'}")],
[('AUTHOR_FIRST_NAME', u'BS'), ('AUTHOR_LAST_NAME', u'Majumdar'), ('AUTHOR_FIRST_NAME', u'SJ'), ('AUTHOR_LAST_NAME', u'Burns'), ('TITLE', u'Crack'), ('TITLE', u'tip'), ('TITLE', u'shieldingan'), ('TITLE', u'elastic'), ('TITLE', u'theory'), ('TITLE', u'of'), ('TITLE', u'dislocations'), ('TITLE', u'and'), ('TITLE', u'dislocation'), ('TITLE', u'arrays'), ('TITLE', u'near'), ('TITLE', u'a'), ('TITLE', u'sharp'), ('TITLE', u'crack'), ('JOURNAL', u'Acta'), ('JOURNAL', u'Metall.'), ('VOLUME', u'29'), ('YEAR', u'1981'), ('PAGE', u'579'), ('REFPLAINTEXT', u'Majumdar, B.S., Burns, S.J.: Crack tip shielding\u2014an elastic theory of dislocations and dislocation arrays near a sharp crack. Acta Metall. 29, 579\u2013588 (1981)'), ('REFSTR', "{u'bibunstructured': u'Majumdar, B.S., Burns, S.J.: Crack tip shielding\\u2014an elastic theory of dislocations and dislocation arrays near a sharp crack. Acta Metall. 29, 579\\u2013588 (1981)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Majumdar', u'initials': u'BS'}, {u'familyname': u'Burns', u'initials': u'SJ'}], u'occurrence': {u'handle': u'10.1016/0001-6160(81)90139-5', u'@type': u'DOI'}, u'journaltitle': u'Acta Metall.', u'volumeid': u'29', u'firstpage': u'579', u'lastpage': u'588', u'year': u'1981', u'articletitle': {u'#text': u'Crack tip shielding\\u2014an elastic theory of dislocations and dislocation arrays near a sharp crack', u'@language': u'En'}}, u'citationnumber': u'6.', u'@id': u'CR6'}")],
[('AUTHOR_FIRST_NAME', u'SA'), ('AUTHOR_LAST_NAME', u'Meguid'), ('AUTHOR_FIRST_NAME', u'W'), ('AUTHOR_LAST_NAME', u'Deng'), ('TITLE', u'Electroelastic'), ('TITLE', u'interaction'), ('TITLE', u'between'), ('TITLE', u'a'), ('TITLE', u'screw'), ('TITLE', u'dislocation'), ('TITLE', u'and'), ('TITLE', u'an'), ('TITLE', u'elliptical'), ('TITLE', u'inhomogeneity'), ('TITLE', u'in'), ('TITLE', u'piezoelectric'), ('TITLE', u'materials'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Solids'), ('JOURNAL', u'Struct.'), ('VOLUME', u'35'), ('YEAR', u'1998'), ('PAGE', u'1467'), ('REFPLAINTEXT', u'Meguid, S.A., Deng, W.: Electro\u2013elastic interaction between a screw dislocation and an elliptical inhomogeneity in piezoelectric materials. Int. J. Solids Struct. 35, 1467\u20131482 (1998)'), ('REFSTR', "{u'bibunstructured': u'Meguid, S.A., Deng, W.: Electro\\u2013elastic interaction between a screw dislocation and an elliptical inhomogeneity in piezoelectric materials. Int. J. Solids Struct. 35, 1467\\u20131482 (1998)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Meguid', u'initials': u'SA'}, {u'familyname': u'Deng', u'initials': u'W'}], u'occurrence': {u'handle': u'10.1016/S0020-7683(97)00116-9', u'@type': u'DOI'}, u'journaltitle': u'Int. J. Solids Struct.', u'volumeid': u'35', u'firstpage': u'1467', u'lastpage': u'1482', u'year': u'1998', u'articletitle': {u'#text': u'Electro\\u2013elastic interaction between a screw dislocation and an elliptical inhomogeneity in piezoelectric materials', u'@language': u'En'}}, u'citationnumber': u'7.', u'@id': u'CR7'}")],
[('AUTHOR_FIRST_NAME', u'YE'), ('AUTHOR_LAST_NAME', u'Pak'), ('TITLE', u'Crack'), ('TITLE', u'extension'), ('TITLE', u'force'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'piezoelectric'), ('TITLE', u'material'), ('JOURNAL', u'ASME'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Mech.'), ('VOLUME', u'57'), ('YEAR', u'1990'), ('PAGE', u'647'), ('REFPLAINTEXT', u'Pak, Y.E.: Crack extension force in a piezoelectric material. ASME J. Appl. Mech. 57, 647\u2013653 (1990a)'), ('REFSTR', "{u'bibunstructured': u'Pak, Y.E.: Crack extension force in a piezoelectric material. ASME J. Appl. Mech. 57, 647\\u2013653 (1990a)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Pak', u'initials': u'YE'}, u'occurrence': {u'handle': u'10.1115/1.2897071', u'@type': u'DOI'}, u'journaltitle': u'ASME J. Appl. Mech.', u'volumeid': u'57', u'firstpage': u'647', u'lastpage': u'653', u'year': u'1990', u'articletitle': {u'#text': u'Crack extension force in a piezoelectric material', u'@language': u'En'}}, u'citationnumber': u'8.', u'@id': u'CR8'}")],
[('AUTHOR_FIRST_NAME', u'YE'), ('AUTHOR_LAST_NAME', u'Pak'), ('TITLE', u'Force'), ('TITLE', u'on'), ('TITLE', u'a'), ('TITLE', u'piezoelectric'), ('TITLE', u'screw'), ('TITLE', u'dislocation'), ('JOURNAL', u'ASME'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Mech.'), ('VOLUME', u'57'), ('YEAR', u'1990'), ('PAGE', u'863'), ('REFPLAINTEXT', u'Pak, Y.E.: Force on a piezoelectric screw dislocation. ASME J. Appl. Mech. 57, 863\u2013869 (1990b)'), ('REFSTR', "{u'bibunstructured': u'Pak, Y.E.: Force on a piezoelectric screw dislocation. ASME J. Appl. Mech. 57, 863\\u2013869 (1990b)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Pak', u'initials': u'YE'}, u'occurrence': {u'handle': u'10.1115/1.2897653', u'@type': u'DOI'}, u'journaltitle': u'ASME J. Appl. Mech.', u'volumeid': u'57', u'firstpage': u'863', u'lastpage': u'869', u'year': u'1990', u'articletitle': {u'#text': u'Force on a piezoelectric screw dislocation', u'@language': u'En'}}, u'citationnumber': u'9.', u'@id': u'CR9'}")],
[('AUTHOR_FIRST_NAME', u'YE'), ('AUTHOR_LAST_NAME', u'Pak'), ('TITLE', u'Circular'), ('TITLE', u'inclusion'), ('TITLE', u'problem'), ('TITLE', u'in'), ('TITLE', u'antiplane'), ('TITLE', u'piezoelectricity'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Solids'), ('JOURNAL', u'Struct.'), ('VOLUME', u'29'), ('YEAR', u'1992'), ('PAGE', u'2403'), ('REFPLAINTEXT', u'Pak, Y.E.: Circular inclusion problem in antiplane piezoelectricity. Int. J. Solids Struct. 29, 2403\u20132419 (1992)'), ('REFSTR', "{u'bibunstructured': u'Pak, Y.E.: Circular inclusion problem in antiplane piezoelectricity. Int. J. Solids Struct. 29, 2403\\u20132419 (1992)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Pak', u'initials': u'YE'}, u'occurrence': {u'handle': u'10.1016/0020-7683(92)90223-G', u'@type': u'DOI'}, u'journaltitle': u'Int. J. Solids Struct.', u'volumeid': u'29', u'firstpage': u'2403', u'lastpage': u'2419', u'year': u'1992', u'articletitle': {u'#text': u'Circular inclusion problem in antiplane piezoelectricity', u'@language': u'En'}}, u'citationnumber': u'10.', u'@id': u'CR10'}")],
[('AUTHOR_FIRST_NAME', u'CQ'), ('AUTHOR_LAST_NAME', u'Ru'), ('TITLE', u'Analytic'), ('TITLE', u'solution'), ('TITLE', u'for'), ('TITLE', u'Eshelbys'), ('TITLE', u'problem'), ('TITLE', u'of'), ('TITLE', u'an'), ('TITLE', u'inclusion'), ('TITLE', u'of'), ('TITLE', u'arbitrary'), ('TITLE', u'shape'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'plane'), ('TITLE', u'or'), ('TITLE', u'half-'), ('TITLE', u'plane'), ('JOURNAL', u'ASME'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Mech.'), ('VOLUME', u'66'), ('YEAR', u'1999'), ('PAGE', u'315'), ('DOI', u'10.1115/1.2791051'), ('REFPLAINTEXT', u'Ru, C.Q.: Analytic solution for Eshelby\u2019s problem of an inclusion of arbitrary shape in a plane or half-plane. ASME J. Appl. Mech. 66, 315\u2013322 (1999)'), ('REFSTR', "{u'bibunstructured': u'Ru, C.Q.: Analytic solution for Eshelby\\u2019s problem of an inclusion of arbitrary shape in a plane or half-plane. ASME J. Appl. Mech. 66, 315\\u2013322 (1999)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Ru', u'initials': u'CQ'}, u'occurrence': [{u'handle': u'1698732', u'@type': u'AMSID'}, {u'handle': u'10.1115/1.2791051', u'@type': u'DOI'}], u'journaltitle': u'ASME J. Appl. Mech.', u'volumeid': u'66', u'firstpage': u'315', u'lastpage': u'322', u'year': u'1999', u'articletitle': {u'#text': u'Analytic solution for Eshelby\\u2019s problem of an inclusion of arbitrary shape in a plane or half-plane', u'@language': u'En'}}, u'citationnumber': u'11.', u'@id': u'CR11'}")],
[('AUTHOR_FIRST_NAME', u'ZG'), ('AUTHOR_LAST_NAME', u'Suo'), ('TITLE', u'Singularities'), ('TITLE', u'interacting'), ('TITLE', u'with'), ('TITLE', u'interfaces'), ('TITLE', u'and'), ('TITLE', u'cracks'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Solids'), ('JOURNAL', u'Struct.'), ('VOLUME', u'25'), ('YEAR', u'1989'), ('PAGE', u'1133'), ('REFPLAINTEXT', u'Suo, Z.G.: Singularities interacting with interfaces and cracks. Int. J. Solids Struct. 25, 1133\u20131142 (1989)'), ('REFSTR', "{u'bibunstructured': u'Suo, Z.G.: Singularities interacting with interfaces and cracks. Int. J. Solids Struct. 25, 1133\\u20131142 (1989)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Suo', u'initials': u'ZG'}, u'occurrence': {u'handle': u'10.1016/0020-7683(89)90096-6', u'@type': u'DOI'}, u'journaltitle': u'Int. J. Solids Struct.', u'volumeid': u'25', u'firstpage': u'1133', u'lastpage': u'1142', u'year': u'1989', u'articletitle': {u'#text': u'Singularities interacting with interfaces and cracks', u'@language': u'En'}}, u'citationnumber': u'12.', u'@id': u'CR12'}")],
[('AUTHOR_FIRST_NAME', u'Z'), ('AUTHOR_LAST_NAME', u'Suo'), ('AUTHOR_FIRST_NAME', u'CM'), ('AUTHOR_LAST_NAME', u'Kuo'), ('AUTHOR_FIRST_NAME', u'DM'), ('AUTHOR_LAST_NAME', u'Barnett'), ('AUTHOR_FIRST_NAME', u'JR'), ('AUTHOR_LAST_NAME', u'Willis'), ('TITLE', u'Fracture'), ('TITLE', u'mechanics'), ('TITLE', u'for'), ('TITLE', u'piezoelectric'), ('TITLE', u'ceramics'), ('JOURNAL', u'J.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Solids'), ('VOLUME', u'40'), ('YEAR', u'1992'), ('PAGE', u'739'), ('DOI', u'10.1016/0022-5096(92)90002-J'), ('REFPLAINTEXT', u'Suo, Z., Kuo, C.M., Barnett, D.M., Willis, J.R.: Fracture mechanics for piezoelectric ceramics. J. Mech. Phys. Solids 40, 739\u2013765 (1992)'), ('REFSTR', "{u'bibunstructured': u'Suo, Z., Kuo, C.M., Barnett, D.M., Willis, J.R.: Fracture mechanics for piezoelectric ceramics. J. Mech. Phys. Solids 40, 739\\u2013765 (1992)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Suo', u'initials': u'Z'}, {u'familyname': u'Kuo', u'initials': u'CM'}, {u'familyname': u'Barnett', u'initials': u'DM'}, {u'familyname': u'Willis', u'initials': u'JR'}], u'occurrence': [{u'handle': u'1163485', u'@type': u'AMSID'}, {u'handle': u'10.1016/0022-5096(92)90002-J', u'@type': u'DOI'}], u'journaltitle': u'J. Mech. Phys. Solids', u'volumeid': u'40', u'firstpage': u'739', u'lastpage': u'765', u'year': u'1992', u'articletitle': {u'#text': u'Fracture mechanics for piezoelectric ceramics', u'@language': u'En'}}, u'citationnumber': u'13.', u'@id': u'CR13'}")],
[('AUTHOR_FIRST_NAME', u'TCT'), ('AUTHOR_LAST_NAME', u'Ting'), ('YEAR', u'1996'), ('PUBLISHER', u'Anisotropic'), ('PUBLISHER', u'Elasticity:'), ('PUBLISHER', u'Theory'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Applications'), ('REFPLAINTEXT', u'Ting, T.C.T.: Anisotropic Elasticity: Theory and Applications. Oxford University Press, New York (1996)'), ('REFSTR', "{u'bibunstructured': u'Ting, T.C.T.: Anisotropic Elasticity: Theory and Applications. Oxford University Press, New York (1996)', u'citationnumber': u'14.', u'@id': u'CR14', u'bibbook': {u'bibauthorname': {u'familyname': u'Ting', u'initials': u'TCT'}, u'publisherlocation': u'New York', u'occurrence': {u'handle': u'0883.73001', u'@type': u'ZLBID'}, u'booktitle': u'Anisotropic Elasticity: Theory and Applications', u'year': u'1996', u'publishername': u'Oxford University Press'}}")],
[('AUTHOR_FIRST_NAME', u'X'), ('AUTHOR_LAST_NAME', u'Wang'), ('AUTHOR_FIRST_NAME', u'H'), ('AUTHOR_LAST_NAME', u'Fan'), ('TITLE', u'A'), ('TITLE', u'piezoelectric'), ('TITLE', u'screw'), ('TITLE', u'dislocation'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'bimaterial'), ('TITLE', u'with'), ('TITLE', u'surface'), ('TITLE', u'piezoelectricity'), ('JOURNAL', u'Acta'), ('JOURNAL', u'Mech.'), ('VOLUME', u'226'), ('YEAR', u'2015'), ('PAGE', u'3317'), ('DOI', u'10.1007/s00707-015-1382-7'), ('REFPLAINTEXT', u'Wang, X., Fan, H.: A piezoelectric screw dislocation in a bimaterial with surface piezoelectricity. Acta Mech. 226, 3317\u20133331 (2015)'), ('REFSTR', "{u'bibunstructured': u'Wang, X., Fan, H.: A piezoelectric screw dislocation in a bimaterial with surface piezoelectricity. Acta Mech. 226, 3317\\u20133331 (2015)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Wang', u'initials': u'X'}, {u'familyname': u'Fan', u'initials': u'H'}], u'occurrence': [{u'handle': u'3395517', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00707-015-1382-7', u'@type': u'DOI'}], u'journaltitle': u'Acta Mech.', u'volumeid': u'226', u'firstpage': u'3317', u'lastpage': u'3331', u'year': u'2015', u'articletitle': {u'#text': u'A piezoelectric screw dislocation in a bimaterial with surface piezoelectricity', u'@language': u'En'}}, u'citationnumber': u'15.', u'@id': u'CR15'}")],
[('AUTHOR_FIRST_NAME', u'X'), ('AUTHOR_LAST_NAME', u'Wang'), ('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Schiavone'), ('TITLE', u'Debonded'), ('TITLE', u'arc'), ('TITLE', u'shaped'), ('TITLE', u'interface'), ('TITLE', u'conducting'), ('TITLE', u'rigid'), ('TITLE', u'line'), ('TITLE', u'inclusions'), ('TITLE', u'in'), ('TITLE', u'piezoelectric'), ('TITLE', u'composites'), ('JOURNAL', u'Comptes'), ('JOURNAL', u'Rendus'), ('JOURNAL', u'Mecanique'), ('VOLUME', u'345'), ('YEAR', u'2017'), ('PAGE', u'724'), ('REFPLAINTEXT', u'Wang, X., Schiavone, P.: Debonded arc shaped interface conducting rigid line inclusions in piezoelectric composites. Comptes Rendus Mecanique 345, 724\u2013731 (2017)'), ('REFSTR', "{u'bibunstructured': u'Wang, X., Schiavone, P.: Debonded arc shaped interface conducting rigid line inclusions in piezoelectric composites. Comptes Rendus Mecanique 345, 724\\u2013731 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Wang', u'initials': u'X'}, {u'familyname': u'Schiavone', u'initials': u'P'}], u'occurrence': {u'handle': u'10.1016/j.crme.2017.07.001', u'@type': u'DOI'}, u'journaltitle': u'Comptes Rendus Mecanique', u'volumeid': u'345', u'firstpage': u'724', u'lastpage': u'731', u'year': u'2017', u'articletitle': {u'#text': u'Debonded arc shaped interface conducting rigid line inclusions in piezoelectric composites', u'@language': u'En'}}, u'citationnumber': u'16.', u'@id': u'CR16'}")],
[('AUTHOR_FIRST_NAME', u'X'), ('AUTHOR_LAST_NAME', u'Wang'), ('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Schiavone'), ('TITLE', u'Interaction'), ('TITLE', u'between'), ('TITLE', u'a'), ('TITLE', u'completely'), ('TITLE', u'coated'), ('TITLE', u'semi-'), ('TITLE', u'infinite'), ('TITLE', u'crack'), ('TITLE', u'and'), ('TITLE', u'a'), ('TITLE', u'screw'), ('TITLE', u'dislocation'), ('JOURNAL', u'Zeitschrift'), ('JOURNAL', u'fur'), ('JOURNAL', u'angewandte'), ('JOURNAL', u'Mathematik'), ('JOURNAL', u'und'), ('JOURNAL', u'Physik'), ('VOLUME', u'70'), ('ISSUE', u'4'), ('YEAR', u'2019'), ('PAGE', u'116'), ('DOI', u'10.1007/s00033-019-1154-7'), ('REFPLAINTEXT', u'Wang, X., Schiavone, P.: Interaction between a completely coated semi-infinite crack and a screw dislocation. Zeitschrift fur angewandte Mathematik und Physik 70(4), 116 (2019)'), ('REFSTR', "{u'bibunstructured': u'Wang, X., Schiavone, P.: Interaction between a completely coated semi-infinite crack and a screw dislocation. Zeitschrift fur angewandte Mathematik und Physik 70(4), 116 (2019)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Wang', u'initials': u'X'}, {u'familyname': u'Schiavone', u'initials': u'P'}], u'issueid': u'4', u'journaltitle': u'Zeitschrift fur angewandte Mathematik und Physik', u'volumeid': u'70', u'firstpage': u'116', u'year': u'2019', u'articletitle': {u'#text': u'Interaction between a completely coated semi-infinite crack and a screw dislocation', u'@language': u'En'}, u'occurrence': [{u'handle': u'3982961', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-019-1154-7', u'@type': u'DOI'}]}, u'citationnumber': u'17.', u'@id': u'CR17'}")],
[('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Harrison'), ('TITLE', u'Modelling'), ('TITLE', u'the'), ('TITLE', u'forming'), ('TITLE', u'mechanics'), ('TITLE', u'of'), ('TITLE', u'engineering'), ('TITLE', u'fabrics'), ('TITLE', u'using'), ('TITLE', u'a'), ('TITLE', u'mutually'), ('TITLE', u'constrained'), ('TITLE', u'pantographic'), ('TITLE', u'beam'), ('TITLE', u'and'), ('TITLE', u'membrane'), ('TITLE', u'mesh'), ('JOURNAL', u'Compos.'), ('JOURNAL', u'Part'), ('JOURNAL', u'A'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Sci.'), ('JOURNAL', u'Manuf.'), ('VOLUME', u'81'), ('YEAR', u'2016'), ('PAGE', u'145'), ('REFPLAINTEXT', u'Harrison, P.: Modelling the forming mechanics of engineering fabrics using a mutually constrained pantographic beam and membrane mesh. Compos. Part A Appl. Sci. Manuf. 81, 145\u2013157 (2016)'), ('REFSTR', "{u'bibunstructured': u'Harrison, P.: Modelling the forming mechanics of engineering fabrics using a mutually constrained pantographic beam and membrane mesh. Compos. Part A Appl. Sci. Manuf. 81, 145\\u2013157 (2016)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Harrison', u'initials': u'P'}, u'occurrence': {u'handle': u'10.1016/j.compositesa.2015.11.005', u'@type': u'DOI'}, u'journaltitle': u'Compos. Part A Appl. Sci. Manuf.', u'volumeid': u'81', u'firstpage': u'145', u'lastpage': u'157', u'year': u'2016', u'articletitle': {u'#text': u'Modelling the forming mechanics of engineering fabrics using a mutually constrained pantographic beam and membrane mesh', u'@outputmedium': u'All', u'@language': u'En'}}, u'citationnumber': u'1.', u'@id': u'CR1'}")],
[('AUTHOR_FIRST_NAME', u'U'), ('AUTHOR_LAST_NAME', u'Andreaus'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('AUTHOR_FIRST_NAME', u'I'), ('AUTHOR_LAST_NAME', u'Giorgio'), ('AUTHOR_FIRST_NAME', u'L'), ('AUTHOR_LAST_NAME', u'Placidi'), ('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Lekszycki'), ('AUTHOR_FIRST_NAME', u'N'), ('AUTHOR_LAST_NAME', u'Rizzi'), ('TITLE', u'Numerical'), ('TITLE', u'simulations'), ('TITLE', u'of'), ('TITLE', u'classical'), ('TITLE', u'problems'), ('TITLE', u'in'), ('TITLE', u'two-'), ('TITLE', u'dimensional'), ('TITLE', u'(non)'), ('TITLE', u'linear'), ('TITLE', u'second'), ('TITLE', u'gradient'), ('TITLE', u'elasticity'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Eng.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'108'), ('YEAR', u'2016'), ('PAGE', u'34'), ('DOI', u'10.1016/j.ijengsci.2016.08.003'), ('REFPLAINTEXT', u'Andreaus, U., dell\u2019Isola, F., Giorgio, I., Placidi, L., Lekszycki, T., Rizzi, N.: Numerical simulations of classical problems in two-dimensional (non) linear second gradient elasticity. Int. J. Eng. Sci. 108, 34\u201350 (2016)'), ('REFSTR', "{u'bibunstructured': u'Andreaus, U., dell\\u2019Isola, F., Giorgio, I., Placidi, L., Lekszycki, T., Rizzi, N.: Numerical simulations of classical problems in two-dimensional (non) linear second gradient elasticity. Int. J. Eng. Sci. 108, 34\\u201350 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Andreaus', u'initials': u'U'}, {u'familyname': u'dell\\u2019Isola', u'initials': u'F'}, {u'familyname': u'Giorgio', u'initials': u'I'}, {u'familyname': u'Placidi', u'initials': u'L'}, {u'familyname': u'Lekszycki', u'initials': u'T'}, {u'familyname': u'Rizzi', u'initials': u'N'}], u'occurrence': [{u'handle': u'3546241', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.ijengsci.2016.08.003', u'@type': u'DOI'}], u'journaltitle': u'Int. J. Eng. 
Sci.', u'volumeid': u'108', u'firstpage': u'34', u'lastpage': u'50', u'year': u'2016', u'articletitle': {u'#text': u'Numerical simulations of classical problems in two-dimensional (non) linear second gradient elasticity', u'@language': u'En'}}, u'citationnumber': u'2.', u'@id': u'CR2'}")],
[('AUTHOR_FIRST_NAME', u'N'), ('AUTHOR_LAST_NAME', u'Auffray'), ('AUTHOR_FIRST_NAME', u'J'), ('AUTHOR_LAST_NAME', u'Dirrenberger'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Rosi'), ('TITLE', u'A'), ('TITLE', u'complete'), ('TITLE', u'description'), ('TITLE', u'of'), ('TITLE', u'bi-'), ('TITLE', u'dimensional'), ('TITLE', u'anisotropic'), ('TITLE', u'strain-'), ('TITLE', u'gradient'), ('TITLE', u'elasticity'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Solids'), ('JOURNAL', u'Struct.'), ('VOLUME', u'69'), ('YEAR', u'2015'), ('PAGE', u'195'), ('REFPLAINTEXT', u'Auffray, N., Dirrenberger, J., Rosi, G.: A complete description of bi-dimensional anisotropic strain-gradient elasticity. Int. J. Solids Struct. 69, 195\u2013206 (2015)'), ('REFSTR', "{u'bibunstructured': u'Auffray, N., Dirrenberger, J., Rosi, G.: A complete description of bi-dimensional anisotropic strain-gradient elasticity. Int. J. Solids Struct. 69, 195\\u2013206 (2015)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Auffray', u'initials': u'N'}, {u'familyname': u'Dirrenberger', u'initials': u'J'}, {u'familyname': u'Rosi', u'initials': u'G'}], u'occurrence': {u'handle': u'10.1016/j.ijsolstr.2015.04.036', u'@type': u'DOI'}, u'journaltitle': u'Int. J. Solids Struct.', u'volumeid': u'69', u'firstpage': u'195', u'lastpage': u'206', u'year': u'2015', u'articletitle': {u'#text': u'A complete description of bi-dimensional anisotropic strain-gradient elasticity', u'@language': u'En'}}, u'citationnumber': u'3.', u'@id': u'CR3'}")],
[('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Battista'), ('AUTHOR_FIRST_NAME', u'L'), ('AUTHOR_LAST_NAME', u'Rosa'), ('AUTHOR_FIRST_NAME', u'R'), ('AUTHOR_LAST_NAME', u'dellErba'), ('AUTHOR_FIRST_NAME', u'L'), ('AUTHOR_LAST_NAME', u'Greco'), ('TITLE', u'Numerical'), ('TITLE', u'investigation'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'particle'), ('TITLE', u'system'), ('TITLE', u'compared'), ('TITLE', u'with'), ('TITLE', u'first'), ('TITLE', u'and'), ('TITLE', u'second'), ('TITLE', u'gradient'), ('TITLE', u'continua:'), ('TITLE', u'deformation'), ('TITLE', u'and'), ('TITLE', u'fracture'), ('TITLE', u'phenomena'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Solids'), ('YEAR', u'2016'), ('DOI', u'10.1177/1081286516657889'), ('REFPLAINTEXT', u'Battista, A., Rosa, L., dell\u2019Erba, R., Greco, L.: Numerical investigation of a particle system compared with first and second gradient continua: deformation and fracture phenomena. Math. Mech. Solids (2016).'), ('REFSTR', "{u'bibunstructured': {u'#text': u'Battista, A., Rosa, L., dell\\u2019Erba, R., Greco, L.: Numerical investigation of a particle system compared with first and second gradient continua: deformation and fracture phenomena. Math. Mech. Solids (2016).', u'externalref': {u'refsource': u'https://doi.org/10.1177/1081286516657889', u'reftarget': {u'@address': u'10.1177/1081286516657889', u'@targettype': u'DOI'}}}, u'bibarticle': {u'bibauthorname': [{u'familyname': u'Battista', u'initials': u'A'}, {u'familyname': u'Rosa', u'initials': u'L'}, {u'familyname': u'dell\\u2019Erba', u'initials': u'R'}, {u'familyname': u'Greco', u'initials': u'L'}], u'occurrence': [{u'handle': u'10.1177/1081286516657889', u'@type': u'DOI'}, {u'handle': u'1395.74005', u'@type': u'ZLBID'}], u'journaltitle': u'Math. Mech. 
Solids', u'bibarticledoi': u'10.1177/1081286516657889', u'year': u'2016', u'articletitle': {u'#text': u'Numerical investigation of a particle system compared with first and second gradient continua: deformation and fracture phenomena', u'@language': u'En'}}, u'citationnumber': u'4.', u'@id': u'CR4'}")],
[('AUTHOR_FIRST_NAME', u'DJ'), ('AUTHOR_LAST_NAME', u'Steigmann'), ('TITLE', u'The'), ('TITLE', u'variational'), ('TITLE', u'structure'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'nonlinear'), ('TITLE', u'theory'), ('TITLE', u'for'), ('TITLE', u'spatial'), ('TITLE', u'lattices'), ('JOURNAL', u'Meccanica'), ('VOLUME', u'31'), ('YEAR', u'1996'), ('PAGE', u'441'), ('DOI', u'10.1007/BF00429932'), ('REFPLAINTEXT', u'Steigmann, D.J.: The variational structure of a nonlinear theory for spatial lattices. Meccanica 31, 441\u2013455 (1996)'), ('REFSTR', "{u'bibunstructured': u'Steigmann, D.J.: The variational structure of a nonlinear theory for spatial lattices. Meccanica 31, 441\\u2013455 (1996)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Steigmann', u'initials': u'DJ'}, u'occurrence': [{u'handle': u'1404203', u'@type': u'AMSID'}, {u'handle': u'10.1007/BF00429932', u'@type': u'DOI'}], u'journaltitle': u'Meccanica', u'volumeid': u'31', u'firstpage': u'441', u'lastpage': u'455', u'year': u'1996', u'articletitle': {u'#text': u'The variational structure of a nonlinear theory for spatial lattices', u'@language': u'En'}}, u'citationnumber': u'5.', u'@id': u'CR5'}")],
[('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Lekszycki'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Pawlikowski'), ('AUTHOR_FIRST_NAME', u'R'), ('AUTHOR_LAST_NAME', u'Grygoruk'), ('AUTHOR_FIRST_NAME', u'L'), ('AUTHOR_LAST_NAME', u'Greco'), ('TITLE', u'Designing'), ('TITLE', u'a'), ('TITLE', u'light'), ('TITLE', u'fabric'), ('TITLE', u'metamaterial'), ('TITLE', u'being'), ('TITLE', u'highly'), ('TITLE', u'macroscopically'), ('TITLE', u'tough'), ('TITLE', u'under'), ('TITLE', u'directional'), ('TITLE', u'extension:'), ('TITLE', u'first'), ('TITLE', u'experimental'), ('TITLE', u'evidence'), ('JOURNAL', u'Z.'), ('JOURNAL', u'f\xfcr'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'66'), ('YEAR', u'2015'), ('PAGE', u'3473'), ('DOI', u'10.1007/s00033-015-0556-4'), ('REFPLAINTEXT', u'dell\u2019Isola, F., Lekszycki, T., Pawlikowski, M., Grygoruk, R., Greco, L.: Designing a light fabric metamaterial being highly macroscopically tough under directional extension: first experimental evidence. Z. f\xfcr Angew. Math. Phys. 66, 3473\u20133498 (2015)'), ('REFSTR', "{u'bibunstructured': u'dell\\u2019Isola, F., Lekszycki, T., Pawlikowski, M., Grygoruk, R., Greco, L.: Designing a light fabric metamaterial being highly macroscopically tough under directional extension: first experimental evidence. Z. f\\xfcr Angew. Math. Phys. 66, 3473\\u20133498 (2015)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'dell\\u2019Isola', u'initials': u'F'}, {u'familyname': u'Lekszycki', u'initials': u'T'}, {u'familyname': u'Pawlikowski', u'initials': u'M'}, {u'familyname': u'Grygoruk', u'initials': u'R'}, {u'familyname': u'Greco', u'initials': u'L'}], u'occurrence': [{u'handle': u'3428477', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-015-0556-4', u'@type': u'DOI'}], u'journaltitle': u'Z. f\\xfcr Angew. Math. 
Phys.', u'volumeid': u'66', u'firstpage': u'3473', u'lastpage': u'3498', u'year': u'2015', u'articletitle': {u'#text': u'Designing a light fabric metamaterial being highly macroscopically tough under directional extension: first experimental evidence', u'@language': u'En'}}, u'citationnumber': u'6.', u'@id': u'CR6'}")],
[('AUTHOR_FIRST_NAME', u'I'), ('AUTHOR_LAST_NAME', u'Giorgio'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Della Corte'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('AUTHOR_FIRST_NAME', u'DJ'), ('AUTHOR_LAST_NAME', u'Steigmann'), ('TITLE', u'Buckling'), ('TITLE', u'modes'), ('TITLE', u'in'), ('TITLE', u'pantographic'), ('TITLE', u'lattices'), ('JOURNAL', u'C.'), ('JOURNAL', u'R.'), ('JOURNAL', u'Mec.'), ('VOLUME', u'344'), ('YEAR', u'2016'), ('PAGE', u'487'), ('REFPLAINTEXT', u'Giorgio, I., Della Corte, A., dell\u2019Isola, F., Steigmann, D.J.: Buckling modes in pantographic lattices. C. R. Mec. 344, 487\u2013501 (2016)'), ('REFSTR', "{u'bibunstructured': u'Giorgio, I., Della Corte, A., dell\\u2019Isola, F., Steigmann, D.J.: Buckling modes in pantographic lattices. C. R. Mec. 344, 487\\u2013501 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Giorgio', u'initials': u'I'}, {u'familyname': u'Della Corte', u'initials': u'A'}, {u'familyname': u'dell\\u2019Isola', u'initials': u'F'}, {u'familyname': u'Steigmann', u'initials': u'DJ'}], u'occurrence': {u'handle': u'10.1016/j.crme.2016.02.009', u'@type': u'DOI'}, u'journaltitle': u'C. R. Mec.', u'volumeid': u'344', u'firstpage': u'487', u'lastpage': u'501', u'year': u'2016', u'articletitle': {u'#text': u'Buckling modes in pantographic lattices', u'@language': u'En'}}, u'citationnumber': u'7.', u'@id': u'CR7'}")],
[('AUTHOR_FIRST_NAME', u'I'), ('AUTHOR_LAST_NAME', u'Giorgio'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Della Corte'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('TITLE', u'Dynamics'), ('TITLE', u'of'), ('TITLE', u'1D'), ('TITLE', u'nonlinear'), ('TITLE', u'pantographic'), ('TITLE', u'continua'), ('JOURNAL', u'Nonlinear'), ('JOURNAL', u'Dyn.'), ('VOLUME', u'88'), ('YEAR', u'2017'), ('PAGE', u'21'), ('REFPLAINTEXT', u'Giorgio, I., Della Corte, A., dell\u2019Isola, F.: Dynamics of 1D nonlinear pantographic continua. Nonlinear Dyn. 88, 21\u201331 (2017)'), ('REFSTR', "{u'bibunstructured': u'Giorgio, I., Della Corte, A., dell\\u2019Isola, F.: Dynamics of 1D nonlinear pantographic continua. Nonlinear Dyn. 88, 21\\u201331 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Giorgio', u'initials': u'I'}, {u'familyname': u'Della Corte', u'initials': u'A'}, {u'familyname': u'dell\\u2019Isola', u'initials': u'F'}], u'occurrence': {u'handle': u'10.1007/s11071-016-3228-9', u'@type': u'DOI'}, u'journaltitle': u'Nonlinear Dyn.', u'volumeid': u'88', u'firstpage': u'21', u'lastpage': u'31', u'year': u'2017', u'articletitle': {u'#text': u'Dynamics of 1D nonlinear pantographic continua', u'@language': u'En'}}, u'citationnumber': u'8.', u'@id': u'CR8'}")],
[('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Turco'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Misra'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Pawlikowski'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'Hild'), ('TITLE', u'Enhanced'), ('TITLE', u'PiolaHencky'), ('TITLE', u'discrete'), ('TITLE', u'models'), ('TITLE', u'for'), ('TITLE', u'pantographic'), ('TITLE', u'sheets'), ('TITLE', u'with'), ('TITLE', u'pivots'), ('TITLE', u'without'), ('TITLE', u'deformation'), ('TITLE', u'energy:'), ('TITLE', u'numerics'), ('TITLE', u'and'), ('TITLE', u'experiments'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Solids'), ('JOURNAL', u'Struct.'), ('VOLUME', u'147'), ('YEAR', u'2018'), ('PAGE', u'94'), ('REFPLAINTEXT', u'Turco, E., Misra, A., Pawlikowski, M., dell\u2019Isola, F., Hild, F.: Enhanced Piola\u2013Hencky discrete models for pantographic sheets with pivots without deformation energy: numerics and experiments. Int. J. Solids Struct. 147, 94\u2013109 (2018)'), ('REFSTR', "{u'bibunstructured': u'Turco, E., Misra, A., Pawlikowski, M., dell\\u2019Isola, F., Hild, F.: Enhanced Piola\\u2013Hencky discrete models for pantographic sheets with pivots without deformation energy: numerics and experiments. Int. J. Solids Struct. 147, 94\\u2013109 (2018)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Turco', u'initials': u'E'}, {u'familyname': u'Misra', u'initials': u'A'}, {u'familyname': u'Pawlikowski', u'initials': u'M'}, {u'familyname': u'dell\\u2019Isola', u'initials': u'F'}, {u'familyname': u'Hild', u'initials': u'F'}], u'occurrence': {u'handle': u'10.1016/j.ijsolstr.2018.05.015', u'@type': u'DOI'}, u'journaltitle': u'Int. J. 
Solids Struct.', u'volumeid': u'147', u'firstpage': u'94', u'lastpage': u'109', u'year': u'2018', u'articletitle': {u'#text': u'Enhanced Piola\\u2013Hencky discrete models for pantographic sheets with pivots without deformation energy: numerics and experiments', u'@language': u'En'}}, u'citationnumber': u'9.', u'@id': u'CR9'}")],
[('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Turco'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Golaszewski'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Cazzani'), ('AUTHOR_FIRST_NAME', u'N'), ('AUTHOR_LAST_NAME', u'Rizzi'), ('TITLE', u'Large'), ('TITLE', u'deformations'), ('TITLE', u'induced'), ('TITLE', u'in'), ('TITLE', u'planar'), ('TITLE', u'pantographic'), ('TITLE', u'sheets'), ('TITLE', u'by'), ('TITLE', u'loads'), ('TITLE', u'applied'), ('TITLE', u'on'), ('TITLE', u'fibers:'), ('TITLE', u'experimental'), ('TITLE', u'validation'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'discrete'), ('TITLE', u'Lagrangian'), ('TITLE', u'model'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Res.'), ('JOURNAL', u'Commun.'), ('VOLUME', u'76'), ('YEAR', u'2016'), ('PAGE', u'51'), ('REFPLAINTEXT', u'Turco, E., Golaszewski, M., Cazzani, A., Rizzi, N.: Large deformations induced in planar pantographic sheets by loads applied on fibers: experimental validation of a discrete Lagrangian model. Mech. Res. Commun. 76, 51\u201356 (2016a)'), ('REFSTR', "{u'bibunstructured': u'Turco, E., Golaszewski, M., Cazzani, A., Rizzi, N.: Large deformations induced in planar pantographic sheets by loads applied on fibers: experimental validation of a discrete Lagrangian model. Mech. Res. Commun. 76, 51\\u201356 (2016a)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Turco', u'initials': u'E'}, {u'familyname': u'Golaszewski', u'initials': u'M'}, {u'familyname': u'Cazzani', u'initials': u'A'}, {u'familyname': u'Rizzi', u'initials': u'N'}], u'occurrence': {u'handle': u'10.1016/j.mechrescom.2016.07.001', u'@type': u'DOI'}, u'journaltitle': u'Mech. Res. 
Commun.', u'volumeid': u'76', u'firstpage': u'51', u'lastpage': u'56', u'year': u'2016', u'articletitle': {u'#text': u'Large deformations induced in planar pantographic sheets by loads applied on fibers: experimental validation of a discrete Lagrangian model', u'@language': u'En'}}, u'citationnumber': u'10.', u'@id': u'CR10'}")],
[('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Turco'), ('AUTHOR_FIRST_NAME', u'K'), ('AUTHOR_LAST_NAME', u'Barcz'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Pawlikowski'), ('AUTHOR_FIRST_NAME', u'N'), ('AUTHOR_LAST_NAME', u'Rizzi'), ('TITLE', u'Non-'), ('TITLE', u'standard'), ('TITLE', u'coupled'), ('TITLE', u'extensional'), ('TITLE', u'and'), ('TITLE', u'bending'), ('TITLE', u'bias'), ('TITLE', u'tests'), ('TITLE', u'for'), ('TITLE', u'planar'), ('TITLE', u'pantographic'), ('TITLE', u'lattices.'), ('TITLE', u'Part'), ('TITLE', u'I:'), ('TITLE', u'numerical'), ('TITLE', u'simulations'), ('JOURNAL', u'Z.'), ('JOURNAL', u'f\xfcr'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'67'), ('YEAR', u'2016'), ('PAGE', u'122'), ('DOI', u'10.1007/s00033-016-0713-4'), ('REFPLAINTEXT', u'Turco, E., Barcz, K., Pawlikowski, M., Rizzi, N.: Non-standard coupled extensional and bending bias tests for planar pantographic lattices. Part I: numerical simulations. Z. f\xfcr Angew. Math. Phys. 67, 122 (2016)'), ('REFSTR', "{u'bibunstructured': u'Turco, E., Barcz, K., Pawlikowski, M., Rizzi, N.: Non-standard coupled extensional and bending bias tests for planar pantographic lattices. Part I: numerical simulations. Z. f\\xfcr Angew. Math. Phys. 67, 122 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Turco', u'initials': u'E'}, {u'familyname': u'Barcz', u'initials': u'K'}, {u'familyname': u'Pawlikowski', u'initials': u'M'}, {u'familyname': u'Rizzi', u'initials': u'N'}], u'occurrence': [{u'handle': u'3547709', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-016-0713-4', u'@type': u'DOI'}], u'journaltitle': u'Z. f\\xfcr Angew. Math. Phys.', u'volumeid': u'67', u'firstpage': u'122', u'year': u'2016', u'articletitle': {u'#text': u'Non-standard coupled extensional and bending bias tests for planar pantographic lattices. Part I: numerical simulations', u'@language': u'En'}}, u'citationnumber': u'11.', u'@id': u'CR11'}")],
[('AUTHOR_FIRST_NAME', u'JJ'), ('AUTHOR_LAST_NAME', u'Alibert'), ('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Seppecher'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('TITLE', u'Truss'), ('TITLE', u'modular'), ('TITLE', u'beams'), ('TITLE', u'with'), ('TITLE', u'deformation'), ('TITLE', u'energy'), ('TITLE', u'depending'), ('TITLE', u'on'), ('TITLE', u'higher'), ('TITLE', u'displacement'), ('TITLE', u'gradients'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Solids'), ('VOLUME', u'8'), ('YEAR', u'2003'), ('PAGE', u'51'), ('DOI', u'10.1177/1081286503008001658'), ('REFPLAINTEXT', u'Alibert, J.J., Seppecher, P., dell\u2019Isola, F.: Truss modular beams with deformation energy depending on higher displacement gradients. Math. Mech. Solids 8, 51\u201373 (2003)'), ('REFSTR', "{u'bibunstructured': u'Alibert, J.J., Seppecher, P., dell\\u2019Isola, F.: Truss modular beams with deformation energy depending on higher displacement gradients. Math. Mech. Solids 8, 51\\u201373 (2003)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Alibert', u'initials': u'JJ'}, {u'familyname': u'Seppecher', u'initials': u'P'}, {u'familyname': u'dell\\u2019Isola', u'initials': u'F'}], u'occurrence': [{u'handle': u'1959303', u'@type': u'AMSID'}, {u'handle': u'10.1177/1081286503008001658', u'@type': u'DOI'}], u'journaltitle': u'Math. Mech. Solids', u'volumeid': u'8', u'firstpage': u'51', u'lastpage': u'73', u'year': u'2003', u'articletitle': {u'#text': u'Truss modular beams with deformation energy depending on higher displacement gradients', u'@language': u'En'}}, u'citationnumber': u'12.', u'@id': u'CR12'}")],
[('AUTHOR_FIRST_NAME', u'U'), ('AUTHOR_LAST_NAME', u'Andreaus'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Spagnuolo'), ('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Lekszycki'), ('AUTHOR_FIRST_NAME', u'SR'), ('AUTHOR_LAST_NAME', u'Eugster'), ('TITLE', u'A'), ('TITLE', u'Ritz'), ('TITLE', u'approach'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'static'), ('TITLE', u'analysis'), ('TITLE', u'of'), ('TITLE', u'planar'), ('TITLE', u'pantographic'), ('TITLE', u'structures'), ('TITLE', u'modeled'), ('TITLE', u'with'), ('TITLE', u'nonlinear'), ('TITLE', u'EulerBernoulli'), ('TITLE', u'beams'), ('JOURNAL', u'Contin.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Thermodyn.'), ('VOLUME', u'30'), ('YEAR', u'2018'), ('PAGE', u'1103'), ('DOI', u'10.1007/s00161-018-0665-3'), ('REFPLAINTEXT', u'Andreaus, U., Spagnuolo, M., Lekszycki, T., Eugster, S.R.: A Ritz approach for the static analysis of planar pantographic structures modeled with nonlinear Euler\u2013Bernoulli beams. Contin. Mech. Thermodyn. 30, 1103\u20131123 (2018)'), ('REFSTR', "{u'bibunstructured': u'Andreaus, U., Spagnuolo, M., Lekszycki, T., Eugster, S.R.: A Ritz approach for the static analysis of planar pantographic structures modeled with nonlinear Euler\\u2013Bernoulli beams. Contin. Mech. Thermodyn. 30, 1103\\u20131123 (2018)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Andreaus', u'initials': u'U'}, {u'familyname': u'Spagnuolo', u'initials': u'M'}, {u'familyname': u'Lekszycki', u'initials': u'T'}, {u'familyname': u'Eugster', u'initials': u'SR'}], u'occurrence': [{u'handle': u'3842030', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00161-018-0665-3', u'@type': u'DOI'}], u'journaltitle': u'Contin. Mech. 
Thermodyn.', u'volumeid': u'30', u'firstpage': u'1103', u'lastpage': u'1123', u'year': u'2018', u'articletitle': {u'#text': u'A Ritz approach for the static analysis of planar pantographic structures modeled with nonlinear Euler\\u2013Bernoulli beams', u'@language': u'En'}}, u'citationnumber': u'13.', u'@id': u'CR13'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Spagnuolo'), ('AUTHOR_FIRST_NAME', u'K'), ('AUTHOR_LAST_NAME', u'Barcz'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Pfaff'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Franciosi'), ('TITLE', u'Qualitative'), ('TITLE', u'pivot'), ('TITLE', u'damage'), ('TITLE', u'analysis'), ('TITLE', u'in'), ('TITLE', u'aluminum'), ('TITLE', u'printed'), ('TITLE', u'pantographic'), ('TITLE', u'sheets:'), ('TITLE', u'numerics'), ('TITLE', u'and'), ('TITLE', u'experiments'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Res.'), ('JOURNAL', u'Commun.'), ('VOLUME', u'83'), ('YEAR', u'2017'), ('PAGE', u'47'), ('REFPLAINTEXT', u'Spagnuolo, M., Barcz, K., Pfaff, A., dell\u2019Isola, F., Franciosi, P.: Qualitative pivot damage analysis in aluminum printed pantographic sheets: numerics and experiments. Mech. Res. Commun. 83, 47\u201352 (2017)'), ('REFSTR', "{u'bibunstructured': u'Spagnuolo, M., Barcz, K., Pfaff, A., dell\\u2019Isola, F., Franciosi, P.: Qualitative pivot damage analysis in aluminum printed pantographic sheets: numerics and experiments. Mech. Res. Commun. 83, 47\\u201352 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Spagnuolo', u'initials': u'M'}, {u'familyname': u'Barcz', u'initials': u'K'}, {u'familyname': u'Pfaff', u'initials': u'A'}, {u'familyname': u'dell\\u2019Isola', u'initials': u'F'}, {u'familyname': u'Franciosi', u'initials': u'P'}], u'occurrence': {u'handle': u'10.1016/j.mechrescom.2017.05.005', u'@type': u'DOI'}, u'journaltitle': u'Mech. Res. Commun.', u'volumeid': u'83', u'firstpage': u'47', u'lastpage': u'52', u'year': u'2017', u'articletitle': {u'#text': u'Qualitative pivot damage analysis in aluminum printed pantographic sheets: numerics and experiments', u'@language': u'En'}}, u'citationnumber': u'14.', u'@id': u'CR14'}")],
[('AUTHOR_FIRST_NAME', u'D'), ('AUTHOR_LAST_NAME', u'Scerrato'), ('AUTHOR_FIRST_NAME', u'IA'), ('AUTHOR_LAST_NAME', u'Zhurba Eremeeva'), ('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Lekszycki'), ('AUTHOR_FIRST_NAME', u'NL'), ('AUTHOR_LAST_NAME', u'Rizzi'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'effect'), ('TITLE', u'of'), ('TITLE', u'shear'), ('TITLE', u'stiffness'), ('TITLE', u'on'), ('TITLE', u'the'), ('TITLE', u'plane'), ('TITLE', u'deformation'), ('TITLE', u'of'), ('TITLE', u'linear'), ('TITLE', u'second'), ('TITLE', u'gradient'), ('TITLE', u'pantographic'), ('TITLE', u'sheets'), ('JOURNAL', u'ZAMM'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Z.'), ('JOURNAL', u'f\xfcr'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('VOLUME', u'96'), ('YEAR', u'2016'), ('PAGE', u'1268'), ('DOI', u'10.1002/zamm.201600066'), ('REFPLAINTEXT', u'Scerrato, D., Zhurba Eremeeva, I.A., Lekszycki, T., Rizzi, N.L.: On the effect of shear stiffness on the plane deformation of linear second gradient pantographic sheets. ZAMM J. Appl. Math. Mech. Z. f\xfcr Angew. Math. Mech. 96, 1268\u20131279 (2016)'), ('REFSTR', "{u'bibunstructured': u'Scerrato, D., Zhurba Eremeeva, I.A., Lekszycki, T., Rizzi, N.L.: On the effect of shear stiffness on the plane deformation of linear second gradient pantographic sheets. ZAMM J. Appl. Math. Mech. Z. f\\xfcr Angew. Math. Mech. 96, 1268\\u20131279 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Scerrato', u'initials': u'D'}, {u'familyname': u'Zhurba Eremeeva', u'initials': u'IA'}, {u'familyname': u'Lekszycki', u'initials': u'T'}, {u'familyname': u'Rizzi', u'initials': u'NL'}], u'occurrence': [{u'handle': u'3580283', u'@type': u'AMSID'}, {u'handle': u'10.1002/zamm.201600066', u'@type': u'DOI'}], u'journaltitle': u'ZAMM J. Appl. Math. Mech. Z. f\\xfcr Angew. Math. 
Mech.', u'volumeid': u'96', u'firstpage': u'1268', u'lastpage': u'1279', u'year': u'2016', u'articletitle': {u'#text': u'On the effect of shear stiffness on the plane deformation of linear second gradient pantographic sheets', u'@language': u'En'}}, u'citationnumber': u'15.', u'@id': u'CR15'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Cuomo'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('AUTHOR_FIRST_NAME', u'L'), ('AUTHOR_LAST_NAME', u'Greco'), ('TITLE', u'Simplified'), ('TITLE', u'analysis'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'generalized'), ('TITLE', u'bias'), ('TITLE', u'test'), ('TITLE', u'for'), ('TITLE', u'fabrics'), ('TITLE', u'with'), ('TITLE', u'two'), ('TITLE', u'families'), ('TITLE', u'of'), ('TITLE', u'inextensible'), ('TITLE', u'fibres'), ('JOURNAL', u'Z.'), ('JOURNAL', u'f\xfcr'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'67'), ('YEAR', u'2016'), ('PAGE', u'61'), ('DOI', u'10.1007/s00033-016-0653-z'), ('REFPLAINTEXT', u'Cuomo, M., dell\u2019Isola, F., Greco, L.: Simplified analysis of a generalized bias test for fabrics with two families of inextensible fibres. Z. f\xfcr Angew. Math. Phys. 67, 61 (2016)'), ('REFSTR', "{u'bibunstructured': u'Cuomo, M., dell\\u2019Isola, F., Greco, L.: Simplified analysis of a generalized bias test for fabrics with two families of inextensible fibres. Z. f\\xfcr Angew. Math. Phys. 67, 61 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Cuomo', u'initials': u'M'}, {u'familyname': u'dell\\u2019Isola', u'initials': u'F'}, {u'familyname': u'Greco', u'initials': u'L'}], u'occurrence': [{u'handle': u'3494482', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-016-0653-z', u'@type': u'DOI'}], u'journaltitle': u'Z. f\\xfcr Angew. Math. Phys.', u'volumeid': u'67', u'firstpage': u'61', u'year': u'2016', u'articletitle': {u'#text': u'Simplified analysis of a generalized bias test for fabrics with two families of inextensible fibres', u'@language': u'En'}}, u'citationnumber': u'16.', u'@id': u'CR16'}")],
[('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('AUTHOR_FIRST_NAME', u'I'), ('AUTHOR_LAST_NAME', u'Giorgio'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Pawlikowski'), ('AUTHOR_FIRST_NAME', u'NL'), ('AUTHOR_LAST_NAME', u'Rizzi'), ('TITLE', u'Large'), ('TITLE', u'deformations'), ('TITLE', u'of'), ('TITLE', u'planar'), ('TITLE', u'extensible'), ('TITLE', u'beams'), ('TITLE', u'and'), ('TITLE', u'pantographic'), ('TITLE', u'lattices:'), ('TITLE', u'heuristic'), ('TITLE', u'homogenization,'), ('TITLE', u'experimental'), ('TITLE', u'and'), ('TITLE', u'numerical'), ('TITLE', u'examples'), ('TITLE', u'of'), ('TITLE', u'equilibrium'), ('JOURNAL', u'Proc.'), ('JOURNAL', u'R.'), ('JOURNAL', u'Soc.'), ('JOURNAL', u'A'), ('VOLUME', u'472'), ('YEAR', u'2016'), ('PAGE', u'20150790'), ('REFPLAINTEXT', u'dell\u2019Isola, F., Giorgio, I., Pawlikowski, M., Rizzi, N.L.: Large deformations of planar extensible beams and pantographic lattices: heuristic homogenization, experimental and numerical examples of equilibrium. Proc. R. Soc. A 472, 20150790 (2016)'), ('REFSTR', "{u'bibunstructured': u'dell\\u2019Isola, F., Giorgio, I., Pawlikowski, M., Rizzi, N.L.: Large deformations of planar extensible beams and pantographic lattices: heuristic homogenization, experimental and numerical examples of equilibrium. Proc. R. Soc. A 472, 20150790 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'dell\\u2019Isola', u'initials': u'F'}, {u'familyname': u'Giorgio', u'initials': u'I'}, {u'familyname': u'Pawlikowski', u'initials': u'M'}, {u'familyname': u'Rizzi', u'initials': u'NL'}], u'occurrence': {u'handle': u'10.1098/rspa.2015.0790', u'@type': u'DOI'}, u'journaltitle': u'Proc. R. Soc. 
A', u'volumeid': u'472', u'firstpage': u'20150790', u'year': u'2016', u'articletitle': {u'#text': u'Large deformations of planar extensible beams and pantographic lattices: heuristic homogenization, experimental and numerical examples of equilibrium', u'@language': u'En'}}, u'citationnumber': u'17.', u'@id': u'CR17'}")],
[('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Misra'), ('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Lekszycki'), ('AUTHOR_FIRST_NAME', u'I'), ('AUTHOR_LAST_NAME', u'Giorgio'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Ganzosch'), ('AUTHOR_FIRST_NAME', u'WH'), ('AUTHOR_LAST_NAME', u'Mller'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('TITLE', u'Pantographic'), ('TITLE', u'metamaterials'), ('TITLE', u'show'), ('TITLE', u'a'), ('TITLE', u'typical'), ('TITLE', u'Poynting'), ('TITLE', u'effect'), ('TITLE', u'reversal'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Res.'), ('JOURNAL', u'Commun.'), ('VOLUME', u'89'), ('YEAR', u'2018'), ('PAGE', u'6'), ('REFPLAINTEXT', u'Misra, A., Lekszycki, T., Giorgio, I., Ganzosch, G., M\xfcller, W.H., dell\u2019Isola, F.: Pantographic metamaterials show a typical Poynting effect reversal. Mech. Res. Commun. 89, 6\u201310 (2018)'), ('REFSTR', "{u'bibunstructured': u'Misra, A., Lekszycki, T., Giorgio, I., Ganzosch, G., M\\xfcller, W.H., dell\\u2019Isola, F.: Pantographic metamaterials show a typical Poynting effect reversal. Mech. Res. Commun. 89, 6\\u201310 (2018)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Misra', u'initials': u'A'}, {u'familyname': u'Lekszycki', u'initials': u'T'}, {u'familyname': u'Giorgio', u'initials': u'I'}, {u'familyname': u'Ganzosch', u'initials': u'G'}, {u'familyname': u'M\\xfcller', u'initials': u'WH'}, {u'familyname': u'dell\\u2019Isola', u'initials': u'F'}], u'occurrence': {u'handle': u'10.1016/j.mechrescom.2018.02.003', u'@type': u'DOI'}, u'journaltitle': u'Mech. Res. Commun.', u'volumeid': u'89', u'firstpage': u'6', u'lastpage': u'10', u'year': u'2018', u'articletitle': {u'#text': u'Pantographic metamaterials show a typical Poynting effect reversal', u'@language': u'En'}}, u'citationnumber': u'18.', u'@id': u'CR18'}")],
[('AUTHOR_FIRST_NAME', u'VA'), ('AUTHOR_LAST_NAME', u'Eremeyev'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('AUTHOR_FIRST_NAME', u'C'), ('AUTHOR_LAST_NAME', u'Boutin'), ('AUTHOR_FIRST_NAME', u'D'), ('AUTHOR_LAST_NAME', u'Steigmann'), ('TITLE', u'Linear'), ('TITLE', u'pantographic'), ('TITLE', u'sheets:'), ('TITLE', u'Existence'), ('TITLE', u'and'), ('TITLE', u'uniqueness'), ('TITLE', u'of'), ('TITLE', u'weak'), ('TITLE', u'solutions'), ('JOURNAL', u'J.'), ('JOURNAL', u'Elast.'), ('VOLUME', u'132'), ('YEAR', u'2017'), ('PAGE', u'1'), ('REFPLAINTEXT', u'Eremeyev, V.A., dell\u2019Isola, F., Boutin, C., Steigmann, D.: Linear pantographic sheets: Existence and uniqueness of weak solutions. J. Elast. 132, 1\u201322 (2017)'), ('REFSTR', "{u'bibunstructured': u'Eremeyev, V.A., dell\\u2019Isola, F., Boutin, C., Steigmann, D.: Linear pantographic sheets: Existence and uniqueness of weak solutions. J. Elast. 132, 1\\u201322 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Eremeyev', u'initials': u'VA'}, {u'familyname': u'dell\\u2019Isola', u'initials': u'F'}, {u'familyname': u'Boutin', u'initials': u'C'}, {u'familyname': u'Steigmann', u'initials': u'D'}], u'occurrence': [{u'handle': u'3831319', u'@type': u'AMSID'}, {u'handle': u'1398.74011', u'@type': u'ZLBID'}], u'journaltitle': u'J. Elast.', u'volumeid': u'132', u'firstpage': u'1', u'lastpage': u'22', u'year': u'2017', u'articletitle': {u'#text': u'Linear pantographic sheets: Existence and uniqueness of weak solutions', u'@language': u'En'}}, u'citationnumber': u'19.', u'@id': u'CR19'}")],
[('AUTHOR_FIRST_NAME', u'L'), ('AUTHOR_LAST_NAME', u'Placidi'), ('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Barchiesi'), ('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Turco'), ('AUTHOR_FIRST_NAME', u'N'), ('AUTHOR_LAST_NAME', u'Rizzi'), ('TITLE', u'A'), ('TITLE', u'review'), ('TITLE', u'on'), ('TITLE', u'2D'), ('TITLE', u'models'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'description'), ('TITLE', u'of'), ('TITLE', u'pantographic'), ('TITLE', u'fabrics'), ('JOURNAL', u'Z.'), ('JOURNAL', u'f\xfcr'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'67'), ('ISSUE', u'5'), ('YEAR', u'2016'), ('PAGE', u'121'), ('DOI', u'10.1007/s00033-016-0716-1'), ('REFPLAINTEXT', u'Placidi, L., Barchiesi, E., Turco, E., Rizzi, N.: A review on 2D models for the description of pantographic fabrics. Z. f\xfcr Angew. Math. Phys. 67(5), 121 (2016)'), ('REFSTR', "{u'bibunstructured': u'Placidi, L., Barchiesi, E., Turco, E., Rizzi, N.: A review on 2D models for the description of pantographic fabrics. Z. f\\xfcr Angew. Math. Phys. 67(5), 121 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Placidi', u'initials': u'L'}, {u'familyname': u'Barchiesi', u'initials': u'E'}, {u'familyname': u'Turco', u'initials': u'E'}, {u'familyname': u'Rizzi', u'initials': u'N'}], u'issueid': u'5', u'journaltitle': u'Z. f\\xfcr Angew. Math. Phys.', u'volumeid': u'67', u'firstpage': u'121', u'year': u'2016', u'articletitle': {u'#text': u'A review on 2D models for the description of pantographic fabrics', u'@language': u'En'}, u'occurrence': [{u'handle': u'3546348', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-016-0716-1', u'@type': u'DOI'}]}, u'citationnumber': u'20.', u'@id': u'CR20'}")],
[('AUTHOR_FIRST_NAME', u'L'), ('AUTHOR_LAST_NAME', u'Placidi'), ('AUTHOR_FIRST_NAME', u'U'), ('AUTHOR_LAST_NAME', u'Andreaus'), ('AUTHOR_FIRST_NAME', u'I'), ('AUTHOR_LAST_NAME', u'Giorgio'), ('TITLE', u'Identification'), ('TITLE', u'of'), ('TITLE', u'two-'), ('TITLE', u'dimensional'), ('TITLE', u'pantographic'), ('TITLE', u'structure'), ('TITLE', u'via'), ('TITLE', u'a'), ('TITLE', u'linear'), ('TITLE', u'D4'), ('TITLE', u'orthotropic'), ('TITLE', u'second'), ('TITLE', u'gradient'), ('TITLE', u'elastic'), ('TITLE', u'model'), ('JOURNAL', u'J.'), ('JOURNAL', u'Eng.'), ('JOURNAL', u'Math.'), ('VOLUME', u'103'), ('YEAR', u'2016'), ('PAGE', u'1'), ('DOI', u'10.1007/s10665-016-9856-8'), ('REFPLAINTEXT', u'Placidi, L., Andreaus, U., Giorgio, I.: Identification of two-dimensional pantographic structure via a linear D4 orthotropic second gradient elastic model. J. Eng. Math. 103, 1\u201321 (2016)'), ('REFSTR', "{u'bibunstructured': u'Placidi, L., Andreaus, U., Giorgio, I.: Identification of two-dimensional pantographic structure via a linear D4 orthotropic second gradient elastic model. J. Eng. Math. 103, 1\\u201321 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Placidi', u'initials': u'L'}, {u'familyname': u'Andreaus', u'initials': u'U'}, {u'familyname': u'Giorgio', u'initials': u'I'}], u'occurrence': [{u'handle': u'3624977', u'@type': u'AMSID'}, {u'handle': u'10.1007/s10665-016-9856-8', u'@type': u'DOI'}], u'journaltitle': u'J. Eng. Math.', u'volumeid': u'103', u'firstpage': u'1', u'lastpage': u'21', u'year': u'2016', u'articletitle': {u'#text': u'Identification of two-dimensional pantographic structure via a linear D4 orthotropic second gradient elastic model', u'@language': u'En'}}, u'citationnumber': u'21.', u'@id': u'CR21'}")],
[('AUTHOR_FIRST_NAME', u'I'), ('AUTHOR_LAST_NAME', u'Giorgio'), ('TITLE', u'Numerical'), ('TITLE', u'identification'), ('TITLE', u'procedure'), ('TITLE', u'between'), ('TITLE', u'a'), ('TITLE', u'micro-'), ('TITLE', u'Cauchy'), ('TITLE', u'model'), ('TITLE', u'and'), ('TITLE', u'a'), ('TITLE', u'macro-'), ('TITLE', u'second'), ('TITLE', u'gradient'), ('TITLE', u'model'), ('TITLE', u'for'), ('TITLE', u'planar'), ('TITLE', u'pantographic'), ('TITLE', u'structures'), ('JOURNAL', u'Z.'), ('JOURNAL', u'f\xfcr'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'67'), ('ISSUE', u'4'), ('YEAR', u'2016'), ('PAGE', u'95'), ('DOI', u'10.1007/s00033-016-0692-5'), ('REFPLAINTEXT', u'Giorgio, I.: Numerical identification procedure between a micro-Cauchy model and a macro-second gradient model for planar pantographic structures. Z. f\xfcr Angew. Math. Phys. 67(4), 95 (2016)'), ('REFSTR', "{u'bibunstructured': u'Giorgio, I.: Numerical identification procedure between a micro-Cauchy model and a macro-second gradient model for planar pantographic structures. Z. f\\xfcr Angew. Math. Phys. 67(4), 95 (2016)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Giorgio', u'initials': u'I'}, u'issueid': u'4', u'journaltitle': u'Z. f\\xfcr Angew. Math. Phys.', u'volumeid': u'67', u'firstpage': u'95', u'year': u'2016', u'articletitle': {u'#text': u'Numerical identification procedure between a micro-Cauchy model and a macro-second gradient model for planar pantographic structures', u'@language': u'En'}, u'occurrence': [{u'handle': u'3528393', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-016-0692-5', u'@type': u'DOI'}]}, u'citationnumber': u'22.', u'@id': u'CR22'}")],
[('AUTHOR_FIRST_NAME', u'Ivo'), ('AUTHOR_LAST_NAME', u'Babuka'), ('YEAR', u'1976'), ('PAGE', u'137'), ('PUBLISHER', u'Lecture'), ('PUBLISHER', u'Notes'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Economics'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Mathematical'), ('PUBLISHER', u'Systems'), ('REFPLAINTEXT', u'Babu\u0161ka, I.: Homogenization approach in engineering. In: Glowinski R., Lions J.L. (eds.) Computing Methods in Applied Sciences and Engineering, pp. 137\u2013153. Springer, Berlin (1976)'), ('REFSTR', "{u'bibunstructured': u'Babu\\u0161ka, I.: Homogenization approach in engineering. In: Glowinski R., Lions J.L. (eds.) Computing Methods in Applied Sciences and Engineering, pp. 137\\u2013153. Springer, Berlin (1976)', u'bibchapter': {u'bibauthorname': {u'familyname': u'Babu\\u0161ka', u'initials': u'Ivo'}, u'publisherlocation': u'Berlin, Heidelberg', u'booktitle': u'Lecture Notes in Economics and Mathematical Systems', u'firstpage': u'137', u'lastpage': u'153', u'year': u'1976', u'publishername': u'Springer Berlin Heidelberg', u'chaptertitle': {u'#text': u'Homogenization Approach In Engineering', u'@language': u'--'}}, u'citationnumber': u'23.', u'@id': u'CR23'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Allaire'), ('TITLE', u'Homogenization'), ('TITLE', u'and'), ('TITLE', u'two-'), ('TITLE', u'scale'), ('TITLE', u'convergence'), ('JOURNAL', u'SIAM'), ('JOURNAL', u'J.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Anal.'), ('VOLUME', u'23'), ('YEAR', u'1992'), ('PAGE', u'1482'), ('DOI', u'10.1137/0523084'), ('REFPLAINTEXT', u'Allaire, G.: Homogenization and two-scale convergence. SIAM J. Math. Anal. 23, 1482\u20131518 (1992)'), ('REFSTR', "{u'bibunstructured': u'Allaire, G.: Homogenization and two-scale convergence. SIAM J. Math. Anal. 23, 1482\\u20131518 (1992)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Allaire', u'initials': u'G'}, u'occurrence': [{u'handle': u'1185639', u'@type': u'AMSID'}, {u'handle': u'10.1137/0523084', u'@type': u'DOI'}], u'journaltitle': u'SIAM J. Math. Anal.', u'volumeid': u'23', u'firstpage': u'1482', u'lastpage': u'1518', u'year': u'1992', u'articletitle': {u'#text': u'Homogenization and two-scale convergence', u'@language': u'En'}}, u'citationnumber': u'24.', u'@id': u'CR24'}")],
[('AUTHOR_FIRST_NAME', u'L'), ('AUTHOR_LAST_NAME', u'Tartar'), ('YEAR', u'2009'), ('PUBLISHER', u'The'), ('PUBLISHER', u'general'), ('PUBLISHER', u'theory'), ('PUBLISHER', u'of'), ('PUBLISHER', u'homogenization:'), ('PUBLISHER', u'A'), ('PUBLISHER', u'personalized'), ('PUBLISHER', u'introduction'), ('REFPLAINTEXT', u'Tartar, L.: The general theory of homogenization: A personalized introduction. Springer, Berlin (2009)'), ('REFSTR', "{u'bibunstructured': u'Tartar, L.: The general theory of homogenization: A personalized introduction. Springer, Berlin (2009)', u'citationnumber': u'25.', u'@id': u'CR25', u'bibbook': {u'bibauthorname': {u'familyname': u'Tartar', u'initials': u'L'}, u'publisherlocation': u'Berlin', u'occurrence': {u'handle': u'1188.35004', u'@type': u'ZLBID'}, u'booktitle': u'The general theory of homogenization: A personalized introduction', u'year': u'2009', u'publishername': u'Springer'}}")],
[('AUTHOR_FIRST_NAME', u'Wenbin'), ('AUTHOR_LAST_NAME', u'Yu'), ('AUTHOR_FIRST_NAME', u'Tian'), ('AUTHOR_LAST_NAME', u'Tang'), ('YEAR', u'2009'), ('PAGE', u'117'), ('PUBLISHER', u'Solid'), ('PUBLISHER', u'Mechanics'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Its'), ('PUBLISHER', u'Applications'), ('REFPLAINTEXT', u'Yu, W., Tang, T.: Variational asymptotic method for unit cell homogenization. In: Gilat, R., Banks-Sills, L. (eds.) Advances in Mathematical Modeling and Experimental Methods for Materials and Structures, pp. 117\u2013130. Springer, Berlin (2009)'), ('REFSTR', "{u'bibunstructured': u'Yu, W., Tang, T.: Variational asymptotic method for unit cell homogenization. In: Gilat, R., Banks-Sills, L. (eds.) Advances in Mathematical Modeling and Experimental Methods for Materials and Structures, pp. 117\\u2013130. Springer, Berlin (2009)', u'bibchapter': {u'bibauthorname': [{u'familyname': u'Yu', u'initials': u'Wenbin'}, {u'familyname': u'Tang', u'initials': u'Tian'}], u'publisherlocation': u'Dordrecht', u'booktitle': u'Solid Mechanics and Its Applications', u'firstpage': u'117', u'lastpage': u'130', u'year': u'2009', u'publishername': u'Springer Netherlands', u'chaptertitle': {u'#text': u'Variational Asymptotic Method for Unit Cell Homogenization', u'@language': u'--'}}, u'citationnumber': u'26.', u'@id': u'CR26'}")],
[('REFPLAINTEXT', u'Golaszewski, M., Grygoruk, R., Giorgio, I., Laudato, M., & Di Cosmo, F.: Metamaterials with relative displacements in their microstructure: technological challenges in 3D printing, experiments and numerical predictions. Continuum Mech Thermodyn 31(4), 1015\u20131034 (2019)'), ('REFSTR', "{u'bibunstructured': u'Golaszewski, M., Grygoruk, R., Giorgio, I., Laudato, M., & Di Cosmo, F.: Metamaterials with relative displacements in their microstructure: technological challenges in 3D printing, experiments and numerical predictions. Continuum Mech Thermodyn 31(4), 1015\\u20131034 (2019)', u'citationnumber': u'27.', u'@id': u'CR27'}")],
[('REFPLAINTEXT', u'Yang, T., Bellouard, Y.: 3D electrostatic actuator fabricated by non-ablative femtosecond laser exposure and chemical etching. In: MATEC Web of Conferences vol. 32. EDP Sciences (2015)'), ('REFSTR', "{u'bibunstructured': u'Yang, T., Bellouard, Y.: 3D electrostatic actuator fabricated by non-ablative femtosecond laser exposure and chemical etching. In: MATEC Web of Conferences vol. 32. EDP Sciences (2015)', u'citationnumber': u'28.', u'@id': u'CR28'}")],
[('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'Koch'), ('AUTHOR_FIRST_NAME', u'D'), ('AUTHOR_LAST_NAME', u'Lehr'), ('AUTHOR_FIRST_NAME', u'O'), ('AUTHOR_LAST_NAME', u'Schnbrodt'), ('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Glaser'), ('AUTHOR_FIRST_NAME', u'R'), ('AUTHOR_LAST_NAME', u'Fechner'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'Frost'), ('TITLE', u'Manufacturing'), ('TITLE', u'of'), ('TITLE', u'highly-'), ('TITLE', u'dispersive,'), ('TITLE', u'high-'), ('TITLE', u'efficiency'), ('TITLE', u'transmission'), ('TITLE', u'gratings'), ('TITLE', u'by'), ('TITLE', u'laser'), ('TITLE', u'interference'), ('TITLE', u'lithography'), ('TITLE', u'and'), ('TITLE', u'dry'), ('TITLE', u'etching'), ('JOURNAL', u'Microelectron.'), ('JOURNAL', u'Eng.'), ('VOLUME', u'191'), ('YEAR', u'2018'), ('PAGE', u'60'), ('REFPLAINTEXT', u'Koch, F., Lehr, D., Sch\xf6nbrodt, O., Glaser, T., Fechner, R., Frost, F.: Manufacturing of highly-dispersive, high-efficiency transmission gratings by laser interference lithography and dry etching. Microelectron. Eng. 191, 60\u201365 (2018)'), ('REFSTR', "{u'bibunstructured': u'Koch, F., Lehr, D., Sch\\xf6nbrodt, O., Glaser, T., Fechner, R., Frost, F.: Manufacturing of highly-dispersive, high-efficiency transmission gratings by laser interference lithography and dry etching. Microelectron. Eng. 191, 60\\u201365 (2018)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Koch', u'initials': u'F'}, {u'familyname': u'Lehr', u'initials': u'D'}, {u'familyname': u'Sch\\xf6nbrodt', u'initials': u'O'}, {u'familyname': u'Glaser', u'initials': u'T'}, {u'familyname': u'Fechner', u'initials': u'R'}, {u'familyname': u'Frost', u'initials': u'F'}], u'occurrence': {u'handle': u'10.1016/j.mee.2018.01.031', u'@type': u'DOI'}, u'journaltitle': u'Microelectron. 
Eng.', u'volumeid': u'191', u'firstpage': u'60', u'lastpage': u'65', u'year': u'2018', u'articletitle': {u'#text': u'Manufacturing of highly-dispersive, high-efficiency transmission gratings by laser interference lithography and dry etching', u'@language': u'En'}}, u'citationnumber': u'29.', u'@id': u'CR29'}")],
[('AUTHOR_FIRST_NAME', u'K'), ('AUTHOR_LAST_NAME', u'Yamada'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Yamada'), ('AUTHOR_FIRST_NAME', u'H'), ('AUTHOR_LAST_NAME', u'Maki'), ('AUTHOR_FIRST_NAME', u'K'), ('AUTHOR_LAST_NAME', u'Itoh'), ('TITLE', u'Fabrication'), ('TITLE', u'of'), ('TITLE', u'arrays'), ('TITLE', u'of'), ('TITLE', u'tapered'), ('TITLE', u'silicon'), ('TITLE', u'micro-'), ('TITLE', u'/nano-'), ('TITLE', u'pillars'), ('TITLE', u'by'), ('TITLE', u'metal-'), ('TITLE', u'assisted'), ('TITLE', u'chemical'), ('TITLE', u'etching'), ('TITLE', u'and'), ('TITLE', u'anisotropic'), ('TITLE', u'wet'), ('TITLE', u'etching'), ('JOURNAL', u'Nanotechnology'), ('VOLUME', u'29'), ('YEAR', u'2018'), ('PAGE', u'28LT01'), ('REFPLAINTEXT', u'Yamada, K., Yamada, M., Maki, H., Itoh, K.: Fabrication of arrays of tapered silicon micro-/nano-pillars by metal-assisted chemical etching and anisotropic wet etching. Nanotechnology 29, 28LT01 (2018)'), ('REFSTR', "{u'bibunstructured': u'Yamada, K., Yamada, M., Maki, H., Itoh, K.: Fabrication of arrays of tapered silicon micro-/nano-pillars by metal-assisted chemical etching and anisotropic wet etching. Nanotechnology 29, 28LT01 (2018)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Yamada', u'initials': u'K'}, {u'familyname': u'Yamada', u'initials': u'M'}, {u'familyname': u'Maki', u'initials': u'H'}, {u'familyname': u'Itoh', u'initials': u'K'}], u'occurrence': {u'handle': u'10.1088/1361-6528/aac04b', u'@type': u'DOI'}, u'journaltitle': u'Nanotechnology', u'volumeid': u'29', u'firstpage': u'28LT01', u'year': u'2018', u'articletitle': {u'#text': u'Fabrication of arrays of tapered silicon micro-/nano-pillars by metal-assisted chemical etching and anisotropic wet etching', u'@language': u'En'}}, u'citationnumber': u'30.', u'@id': u'CR30'}")],
[('AUTHOR_FIRST_NAME', u'MP'), ('AUTHOR_LAST_NAME', u'Larsson'), ('TITLE', u'Arbitrarily'), ('TITLE', u'profiled'), ('TITLE', u'3D'), ('TITLE', u'polymer'), ('TITLE', u'MEMS'), ('TITLE', u'through'), ('TITLE', u'Si'), ('TITLE', u'micro-'), ('TITLE', u'moulding'), ('TITLE', u'and'), ('TITLE', u'bulk'), ('TITLE', u'micromachining'), ('JOURNAL', u'Microelectron.'), ('JOURNAL', u'Eng.'), ('VOLUME', u'83'), ('YEAR', u'2006'), ('PAGE', u'1257'), ('REFPLAINTEXT', u'Larsson, M.P.: Arbitrarily profiled 3D polymer MEMS through Si micro-moulding and bulk micromachining. Microelectron. Eng. 83, 1257\u20131260 (2006)'), ('REFSTR', "{u'bibunstructured': u'Larsson, M.P.: Arbitrarily profiled 3D polymer MEMS through Si micro-moulding and bulk micromachining. Microelectron. Eng. 83, 1257\\u20131260 (2006)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Larsson', u'initials': u'MP'}, u'occurrence': {u'handle': u'10.1016/j.mee.2006.01.215', u'@type': u'DOI'}, u'journaltitle': u'Microelectron. Eng.', u'volumeid': u'83', u'firstpage': u'1257', u'lastpage': u'1260', u'year': u'2006', u'articletitle': {u'#text': u'Arbitrarily profiled 3D polymer MEMS through Si micro-moulding and bulk micromachining', u'@language': u'En'}}, u'citationnumber': u'31.', u'@id': u'CR31'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Milton'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Briane'), ('AUTHOR_FIRST_NAME', u'D'), ('AUTHOR_LAST_NAME', u'Harutyunyan'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'possible'), ('TITLE', u'effective'), ('TITLE', u'elasticity'), ('TITLE', u'tensors'), ('TITLE', u'of'), ('TITLE', u'2-'), ('TITLE', u'dimensional'), ('TITLE', u'and'), ('TITLE', u'3-'), ('TITLE', u'dimensional'), ('TITLE', u'printed'), ('TITLE', u'materials'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Complex'), ('JOURNAL', u'Syst.'), ('VOLUME', u'5'), ('YEAR', u'2017'), ('PAGE', u'41'), ('DOI', u'10.2140/memocs.2017.5.41'), ('REFPLAINTEXT', u'Milton, G., Briane, M., Harutyunyan, D.: On the possible effective elasticity tensors of 2-dimensional and 3-dimensional printed materials. Math. Mech. Complex Syst. 5, 41\u201394 (2017)'), ('REFSTR', "{u'bibunstructured': u'Milton, G., Briane, M., Harutyunyan, D.: On the possible effective elasticity tensors of 2-dimensional and 3-dimensional printed materials. Math. Mech. Complex Syst. 5, 41\\u201394 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Milton', u'initials': u'G'}, {u'familyname': u'Briane', u'initials': u'M'}, {u'familyname': u'Harutyunyan', u'initials': u'D'}], u'occurrence': [{u'handle': u'3677943', u'@type': u'AMSID'}, {u'handle': u'10.2140/memocs.2017.5.41', u'@type': u'DOI'}], u'journaltitle': u'Math. Mech. Complex Syst.', u'volumeid': u'5', u'firstpage': u'41', u'lastpage': u'94', u'year': u'2017', u'articletitle': {u'#text': u'On the possible effective elasticity tensors of 2-dimensional and 3-dimensional printed materials', u'@language': u'En'}}, u'citationnumber': u'32.', u'@id': u'CR32'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Milton'), ('AUTHOR_FIRST_NAME', u'D'), ('AUTHOR_LAST_NAME', u'Harutyunyan'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Briane'), ('TITLE', u'Towards'), ('TITLE', u'a'), ('TITLE', u'complete'), ('TITLE', u'characterization'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'effective'), ('TITLE', u'elasticity'), ('TITLE', u'tensors'), ('TITLE', u'of'), ('TITLE', u'mixtures'), ('TITLE', u'of'), ('TITLE', u'an'), ('TITLE', u'elastic'), ('TITLE', u'phase'), ('TITLE', u'and'), ('TITLE', u'an'), ('TITLE', u'almost'), ('TITLE', u'rigid'), ('TITLE', u'phase'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Complex'), ('JOURNAL', u'Syst.'), ('VOLUME', u'5'), ('YEAR', u'2017'), ('PAGE', u'95'), ('DOI', u'10.2140/memocs.2017.5.95'), ('REFPLAINTEXT', u'Milton, G., Harutyunyan, D., Briane, M.: Towards a complete characterization of the effective elasticity tensors of mixtures of an elastic phase and an almost rigid phase. Math. Mech. Complex Syst. 5, 95\u2013113 (2017)'), ('REFSTR', "{u'bibunstructured': u'Milton, G., Harutyunyan, D., Briane, M.: Towards a complete characterization of the effective elasticity tensors of mixtures of an elastic phase and an almost rigid phase. Math. Mech. Complex Syst. 5, 95\\u2013113 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Milton', u'initials': u'G'}, {u'familyname': u'Harutyunyan', u'initials': u'D'}, {u'familyname': u'Briane', u'initials': u'M'}], u'occurrence': [{u'handle': u'3677944', u'@type': u'AMSID'}, {u'handle': u'10.2140/memocs.2017.5.95', u'@type': u'DOI'}], u'journaltitle': u'Math. Mech. Complex Syst.', u'volumeid': u'5', u'firstpage': u'95', u'lastpage': u'113', u'year': u'2017', u'articletitle': {u'#text': u'Towards a complete characterization of the effective elasticity tensors of mixtures of an elastic phase and an almost rigid phase', u'@language': u'En'}}, u'citationnumber': u'33.', u'@id': u'CR33'}")],
[('AUTHOR_FIRST_NAME', u'H'), ('AUTHOR_LAST_NAME', u'Abdoul-Anziz'), ('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Seppecher'), ('TITLE', u'Strain'), ('TITLE', u'gradient'), ('TITLE', u'and'), ('TITLE', u'generalized'), ('TITLE', u'continua'), ('TITLE', u'obtained'), ('TITLE', u'by'), ('TITLE', u'homogenizing'), ('TITLE', u'frame'), ('TITLE', u'lattices'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Complex'), ('JOURNAL', u'Syst.'), ('VOLUME', u'6'), ('YEAR', u'2018'), ('PAGE', u'213'), ('DOI', u'10.2140/memocs.2018.6.213'), ('REFPLAINTEXT', u'Abdoul-Anziz, H., Seppecher, P.: Strain gradient and generalized continua obtained by homogenizing frame lattices. Math. Mech. Complex Syst. 6, 213\u2013250 (2018)'), ('REFSTR', "{u'bibunstructured': u'Abdoul-Anziz, H., Seppecher, P.: Strain gradient and generalized continua obtained by homogenizing frame lattices. Math. Mech. Complex Syst. 6, 213\\u2013250 (2018)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Abdoul-Anziz', u'initials': u'H'}, {u'familyname': u'Seppecher', u'initials': u'P'}], u'occurrence': [{u'handle': u'3858777', u'@type': u'AMSID'}, {u'handle': u'10.2140/memocs.2018.6.213', u'@type': u'DOI'}], u'journaltitle': u'Math. Mech. Complex Syst.', u'volumeid': u'6', u'firstpage': u'213', u'lastpage': u'250', u'year': u'2018', u'articletitle': {u'#text': u'Strain gradient and generalized continua obtained by homogenizing frame lattices', u'@language': u'En'}}, u'citationnumber': u'34.', u'@id': u'CR34'}")],
[('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Barchiesi'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Spagnuolo'), ('AUTHOR_FIRST_NAME', u'L'), ('AUTHOR_LAST_NAME', u'Placidi'), ('TITLE', u'Mechanical'), ('TITLE', u'metamaterials:'), ('TITLE', u'a'), ('TITLE', u'state'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'art'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Solids'), ('VOLUME', u'24'), ('YEAR', u'2018'), ('PAGE', u'212'), ('DOI', u'10.1177/1081286517735695'), ('REFPLAINTEXT', u'Barchiesi, E., Spagnuolo, M., Placidi, L.: Mechanical metamaterials: a state of the art. Math. Mech. Solids 24, 212\u2013234 (2018)'), ('REFSTR', "{u'bibunstructured': u'Barchiesi, E., Spagnuolo, M., Placidi, L.: Mechanical metamaterials: a state of the art. Math. Mech. Solids 24, 212\\u2013234 (2018)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Barchiesi', u'initials': u'E'}, {u'familyname': u'Spagnuolo', u'initials': u'M'}, {u'familyname': u'Placidi', u'initials': u'L'}], u'occurrence': [{u'handle': u'3894504', u'@type': u'AMSID'}, {u'handle': u'10.1177/1081286517735695', u'@type': u'DOI'}], u'journaltitle': u'Math. Mech. Solids', u'volumeid': u'24', u'firstpage': u'212', u'lastpage': u'234', u'year': u'2018', u'articletitle': {u'#text': u'Mechanical metamaterials: a state of the art', u'@language': u'En'}}, u'citationnumber': u'35.', u'@id': u'CR35'}")],
[('REFPLAINTEXT', u'Di Cosmo, F., Laudato, M., Spagnuolo, M.: Acoustic metamaterials based on local resonances: homogenization, optimization and applications. In: Altenbach, H., Pouget, J., Rousseau, M., Collet, B., Michelitsch, Th. (eds.) Generalized Models and Non-classical Approaches in Complex Materials 1, pp. 247\u2013274. Springer, Berlin (2018)'), ('REFSTR', "{u'bibunstructured': u'Di Cosmo, F., Laudato, M., Spagnuolo, M.: Acoustic metamaterials based on local resonances: homogenization, optimization and applications. In: Altenbach, H., Pouget, J., Rousseau, M., Collet, B., Michelitsch, Th. (eds.) Generalized Models and Non-classical Approaches in Complex Materials 1, pp. 247\\u2013274. Springer, Berlin (2018)', u'citationnumber': u'36.', u'@id': u'CR36'}")],
[('AUTHOR_FIRST_NAME', u'Emilio'), ('AUTHOR_LAST_NAME', u'Barchiesi'), ('AUTHOR_FIRST_NAME', u'Francesco'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('AUTHOR_FIRST_NAME', u'Marco'), ('AUTHOR_LAST_NAME', u'Laudato'), ('AUTHOR_FIRST_NAME', u'Luca'), ('AUTHOR_LAST_NAME', u'Placidi'), ('AUTHOR_FIRST_NAME', u'Pierre'), ('AUTHOR_LAST_NAME', u'Seppecher'), ('YEAR', u'2018'), ('PAGE', u'43'), ('PUBLISHER', u'Advanced'), ('PUBLISHER', u'Structured'), ('PUBLISHER', u'Materials'), ('REFPLAINTEXT', u'Barchiesi, E., dell\u2019Isola, F., Laudato, M., Placidi, L., Seppecher, P.: A 1D continuum model for beams with pantographic microstructure: asymptotic micro-macro identification and numerical results. In: dell\u2019Isola, F., Eremeyev, V.A., Porubov, A.V. (eds.) Advances in Mechanics of Microstructured Media and Structures, pp. 43\u201374. Springer, Berlin (2018)'), ('REFSTR', "{u'bibunstructured': u'Barchiesi, E., dell\\u2019Isola, F., Laudato, M., Placidi, L., Seppecher, P.: A 1D continuum model for beams with pantographic microstructure: asymptotic micro-macro identification and numerical results. In: dell\\u2019Isola, F., Eremeyev, V.A., Porubov, A.V. (eds.) Advances in Mechanics of Microstructured Media and Structures, pp. 43\\u201374. 
Springer, Berlin (2018)', u'bibchapter': {u'bibauthorname': [{u'familyname': u'Barchiesi', u'initials': u'Emilio'}, {u'familyname': u'dell\\u2019Isola', u'initials': u'Francesco'}, {u'familyname': u'Laudato', u'initials': u'Marco'}, {u'familyname': u'Placidi', u'initials': u'Luca'}, {u'familyname': u'Seppecher', u'initials': u'Pierre'}], u'publisherlocation': u'Cham', u'booktitle': u'Advanced Structured Materials', u'firstpage': u'43', u'lastpage': u'74', u'year': u'2018', u'publishername': u'Springer International Publishing', u'chaptertitle': {u'#text': u'A 1D Continuum Model for Beams with Pantographic Microstructure: Asymptotic Micro-Macro Identification and Numerical Results', u'@language': u'--'}}, u'citationnumber': u'37.', u'@id': u'CR37'}")],
[('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Seppecher'), ('AUTHOR_FIRST_NAME', u'JJ'), ('AUTHOR_LAST_NAME', u'Alibert'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('TITLE', u'Linear'), ('TITLE', u'elastic'), ('TITLE', u'trusses'), ('TITLE', u'leading'), ('TITLE', u'to'), ('TITLE', u'continua'), ('TITLE', u'with'), ('TITLE', u'exotic'), ('TITLE', u'mechanical'), ('TITLE', u'interactions'), ('JOURNAL', u'J.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Conf.'), ('JOURNAL', u'Ser.'), ('VOLUME', u'319'), ('YEAR', u'2011'), ('PAGE', u'12'), ('REFPLAINTEXT', u'Seppecher, P., Alibert, J.J., dell\u2019Isola, F.: Linear elastic trusses leading to continua with exotic mechanical interactions. J. Phys. Conf. Ser. 319, 12\u201318 (2011)'), ('REFSTR', "{u'bibunstructured': u'Seppecher, P., Alibert, J.J., dell\\u2019Isola, F.: Linear elastic trusses leading to continua with exotic mechanical interactions. J. Phys. Conf. Ser. 319, 12\\u201318 (2011)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Seppecher', u'initials': u'P'}, {u'familyname': u'Alibert', u'initials': u'JJ'}, {u'familyname': u'dell\\u2019Isola', u'initials': u'F'}], u'occurrence': {u'handle': u'10.1088/1742-6596/319/1/012018', u'@type': u'DOI'}, u'journaltitle': u'J. Phys. Conf. Ser.', u'volumeid': u'319', u'firstpage': u'12', u'lastpage': u'18', u'year': u'2011', u'articletitle': {u'#text': u'Linear elastic trusses leading to continua with exotic mechanical interactions', u'@language': u'En'}}, u'citationnumber': u'38.', u'@id': u'CR38'}")],
[('AUTHOR_FIRST_NAME', u'JJ'), ('AUTHOR_LAST_NAME', u'Alibert'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Della Corte'), ('AUTHOR_FIRST_NAME', u'I'), ('AUTHOR_LAST_NAME', u'Giorgio'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Battista'), ('TITLE', u'Extensional'), ('TITLE', u'Elastica'), ('TITLE', u'in'), ('TITLE', u'large'), ('TITLE', u'deformation'), ('TITLE', u'as'), ('TITLE', u'-'), ('TITLE', u'limit'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'discrete'), ('TITLE', u'1D'), ('TITLE', u'mechanical'), ('TITLE', u'system'), ('JOURNAL', u'Z.'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'68'), ('YEAR', u'2017'), ('PAGE', u'42'), ('DOI', u'10.1007/s00033-017-0785-9'), ('REFPLAINTEXT', u'Alibert, J.J., Della Corte, A., Giorgio, I., Battista, A.: Extensional Elastica in large deformation as \u0393 -limit of a discrete 1D mechanical system. Z. Angew. Math. Phys. 68, 42 (2017)'), ('REFSTR', "{u'bibunstructured': u'Alibert, J.J., Della Corte, A., Giorgio, I., Battista, A.: Extensional Elastica in large deformation as \\u0393 -limit of a discrete 1D mechanical system. Z. Angew. Math. Phys. 68, 42 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Alibert', u'initials': u'JJ'}, {u'familyname': u'Della Corte', u'initials': u'A'}, {u'familyname': u'Giorgio', u'initials': u'I'}, {u'familyname': u'Battista', u'initials': u'A'}], u'occurrence': [{u'handle': u'3619438', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-017-0785-9', u'@type': u'DOI'}], u'journaltitle': u'Z. Angew. Math. Phys.', u'volumeid': u'68', u'firstpage': u'42', u'year': u'2017', u'articletitle': {u'#text': u'Extensional Elastica in large deformation as \\u0393 -limit of a discrete 1D mechanical system', u'@language': u'En'}}, u'citationnumber': u'39.', u'@id': u'CR39'}")],
[('AUTHOR_FIRST_NAME', u'VA'), ('AUTHOR_LAST_NAME', u'Eremeyev'), ('AUTHOR_FIRST_NAME', u'W'), ('AUTHOR_LAST_NAME', u'Pietraszkiewicz'), ('TITLE', u'The'), ('TITLE', u'nonlinear'), ('TITLE', u'theory'), ('TITLE', u'of'), ('TITLE', u'elastic'), ('TITLE', u'shells'), ('TITLE', u'with'), ('TITLE', u'phase'), ('TITLE', u'transitions'), ('JOURNAL', u'J.'), ('JOURNAL', u'Elast.'), ('VOLUME', u'74'), ('YEAR', u'2004'), ('PAGE', u'67'), ('DOI', u'10.1023/B:ELAS.0000026106.09385.8c'), ('REFPLAINTEXT', u'Eremeyev, V.A., Pietraszkiewicz, W.: The nonlinear theory of elastic shells with phase transitions. J. Elast. 74, 67\u201386 (2004)'), ('REFSTR', "{u'bibunstructured': u'Eremeyev, V.A., Pietraszkiewicz, W.: The nonlinear theory of elastic shells with phase transitions. J. Elast. 74, 67\\u201386 (2004)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Eremeyev', u'initials': u'VA'}, {u'familyname': u'Pietraszkiewicz', u'initials': u'W'}], u'occurrence': [{u'handle': u'2058196', u'@type': u'AMSID'}, {u'handle': u'10.1023/B:ELAS.0000026106.09385.8c', u'@type': u'DOI'}], u'journaltitle': u'J. Elast.', u'volumeid': u'74', u'firstpage': u'67', u'lastpage': u'86', u'year': u'2004', u'articletitle': {u'#text': u'The nonlinear theory of elastic shells with phase transitions', u'@language': u'En'}}, u'citationnumber': u'40.', u'@id': u'CR40'}")],
[('AUTHOR_FIRST_NAME', u'SR'), ('AUTHOR_LAST_NAME', u'Eugster'), ('AUTHOR_FIRST_NAME', u'C'), ('AUTHOR_LAST_NAME', u'Glocker'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'notion'), ('TITLE', u'of'), ('TITLE', u'stress'), ('TITLE', u'in'), ('TITLE', u'classical'), ('TITLE', u'continuum'), ('TITLE', u'mechanics'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Complex'), ('JOURNAL', u'Syst.'), ('VOLUME', u'5'), ('YEAR', u'2017'), ('PAGE', u'299'), ('DOI', u'10.2140/memocs.2017.5.299'), ('REFPLAINTEXT', u'Eugster, S.R., Glocker, C.: On the notion of stress in classical continuum mechanics. Math. Mech. Complex Syst. 5, 299\u2013338 (2017)'), ('REFSTR', "{u'bibunstructured': u'Eugster, S.R., Glocker, C.: On the notion of stress in classical continuum mechanics. Math. Mech. Complex Syst. 5, 299\\u2013338 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Eugster', u'initials': u'SR'}, {u'familyname': u'Glocker', u'initials': u'C'}], u'occurrence': [{u'handle': u'3740256', u'@type': u'AMSID'}, {u'handle': u'10.2140/memocs.2017.5.299', u'@type': u'DOI'}], u'journaltitle': u'Math. Mech. Complex Syst.', u'volumeid': u'5', u'firstpage': u'299', u'lastpage': u'338', u'year': u'2017', u'articletitle': {u'#text': u'On the notion of stress in classical continuum mechanics', u'@language': u'En'}}, u'citationnumber': u'41.', u'@id': u'CR41'}")],
[('AUTHOR_FIRST_NAME', u'D'), ('AUTHOR_LAST_NAME', u'Steigmann'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Faulkner'), ('TITLE', u'Variational'), ('TITLE', u'theory'), ('TITLE', u'for'), ('TITLE', u'spatial'), ('TITLE', u'rods'), ('JOURNAL', u'J.'), ('JOURNAL', u'Elast.'), ('VOLUME', u'33'), ('YEAR', u'1993'), ('PAGE', u'1'), ('DOI', u'10.1007/BF00042633'), ('REFPLAINTEXT', u'Steigmann, D., Faulkner, M.: Variational theory for spatial rods. J. Elast. 33, 1\u201326 (1993)'), ('REFSTR', "{u'bibunstructured': u'Steigmann, D., Faulkner, M.: Variational theory for spatial rods. J. Elast. 33, 1\\u201326 (1993)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Steigmann', u'initials': u'D'}, {u'familyname': u'Faulkner', u'initials': u'M'}], u'occurrence': [{u'handle': u'1255038', u'@type': u'AMSID'}, {u'handle': u'10.1007/BF00042633', u'@type': u'DOI'}], u'journaltitle': u'J. Elast.', u'volumeid': u'33', u'firstpage': u'1', u'lastpage': u'26', u'year': u'1993', u'articletitle': {u'#text': u'Variational theory for spatial rods', u'@language': u'En'}}, u'citationnumber': u'42.', u'@id': u'CR42'}")],
[('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Germain'), ('TITLE', u'The'), ('TITLE', u'method'), ('TITLE', u'of'), ('TITLE', u'virtual'), ('TITLE', u'power'), ('TITLE', u'in'), ('TITLE', u'continuum'), ('TITLE', u'mechanics.'), ('TITLE', u'Part'), ('TITLE', u'2:'), ('TITLE', u'microstructure'), ('JOURNAL', u'SIAM'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'25'), ('YEAR', u'1973'), ('PAGE', u'556'), ('REFPLAINTEXT', u'Germain, P.: The method of virtual power in continuum mechanics. Part 2: microstructure. SIAM J. Appl. Math. 25, 556\u2013575 (1973)'), ('REFSTR', "{u'bibunstructured': u'Germain, P.: The method of virtual power in continuum mechanics. Part 2: microstructure. SIAM J. Appl. Math. 25, 556\\u2013575 (1973)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Germain', u'initials': u'P'}, u'occurrence': {u'handle': u'10.1137/0125053', u'@type': u'DOI'}, u'journaltitle': u'SIAM J. Appl. Math.', u'volumeid': u'25', u'firstpage': u'556', u'lastpage': u'575', u'year': u'1973', u'articletitle': {u'#text': u'The method of virtual power in continuum mechanics. Part 2: microstructure', u'@language': u'En'}}, u'citationnumber': u'43.', u'@id': u'CR43'}")],
[('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Forest'), ('AUTHOR_FIRST_NAME', u'R'), ('AUTHOR_LAST_NAME', u'Sievert'), ('TITLE', u'Nonlinear'), ('TITLE', u'microstrain'), ('TITLE', u'theories'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Solids'), ('JOURNAL', u'Struct.'), ('VOLUME', u'43'), ('YEAR', u'2006'), ('PAGE', u'7224'), ('DOI', u'10.1016/j.ijsolstr.2006.05.012'), ('REFPLAINTEXT', u'Forest, S., Sievert, R.: Nonlinear microstrain theories. Int. J. Solids Struct. 43, 7224\u20137245 (2006). Size-dependent Mechanics of Materials'), ('REFSTR', "{u'bibunstructured': u'Forest, S., Sievert, R.: Nonlinear microstrain theories. Int. J. Solids Struct. 43, 7224\\u20137245 (2006). Size-dependent Mechanics of Materials', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Forest', u'initials': u'S'}, {u'familyname': u'Sievert', u'initials': u'R'}], u'occurrence': [{u'handle': u'2281498', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.ijsolstr.2006.05.012', u'@type': u'DOI'}], u'journaltitle': u'Int. J. Solids Struct.', u'volumeid': u'43', u'firstpage': u'7224', u'lastpage': u'7245', u'bibcomments': u'Size-dependent Mechanics of Materials', u'year': u'2006', u'articletitle': {u'#text': u'Nonlinear microstrain theories', u'@language': u'En'}}, u'citationnumber': u'44.', u'@id': u'CR44'}")],
[('AUTHOR_FIRST_NAME', u'VA'), ('AUTHOR_LAST_NAME', u'Eremeyev'), ('AUTHOR_FIRST_NAME', u'LP'), ('AUTHOR_LAST_NAME', u'Lebedev'), ('AUTHOR_FIRST_NAME', u'H'), ('AUTHOR_LAST_NAME', u'Altenbach'), ('YEAR', u'2012'), ('PUBLISHER', u'Foundations'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Micropolar'), ('PUBLISHER', u'Mechanics'), ('REFPLAINTEXT', u'Eremeyev, V.A., Lebedev, L.P., Altenbach, H.: Foundations of Micropolar Mechanics. Springer, Berlin (2012)'), ('REFSTR', "{u'bibunstructured': u'Eremeyev, V.A., Lebedev, L.P., Altenbach, H.: Foundations of Micropolar Mechanics. Springer, Berlin (2012)', u'citationnumber': u'45.', u'@id': u'CR45', u'bibbook': {u'bibauthorname': [{u'familyname': u'Eremeyev', u'initials': u'VA'}, {u'familyname': u'Lebedev', u'initials': u'LP'}, {u'familyname': u'Altenbach', u'initials': u'H'}], u'publisherlocation': u'Berlin', u'occurrence': {u'handle': u'1257.74002', u'@type': u'ZLBID'}, u'booktitle': u'Foundations of Micropolar Mechanics', u'year': u'2012', u'publishername': u'Springer'}}")],
[('AUTHOR_FIRST_NAME', u'Holm'), ('AUTHOR_LAST_NAME', u'Altenbach'), ('AUTHOR_FIRST_NAME', u'Mircea'), ('AUTHOR_LAST_NAME', u'Brsan'), ('AUTHOR_FIRST_NAME', u'Victor A.'), ('AUTHOR_LAST_NAME', u'Eremeyev'), ('YEAR', u'2013'), ('PAGE', u'179'), ('PUBLISHER', u'Generalized'), ('PUBLISHER', u'Continua'), ('PUBLISHER', u'from'), ('PUBLISHER', u'the'), ('PUBLISHER', u'Theory'), ('PUBLISHER', u'to'), ('PUBLISHER', u'Engineering'), ('PUBLISHER', u'Applications'), ('REFPLAINTEXT', u'Altenbach, H., B\xeersan, M., Eremeyev, V.A.: Cosserat-type rods. In: Altenbach, H., Eremeyev, V.A. (eds.) Generalized Continua from the Theory to Engineering Applications, pp. 179\u2013248. Springer, Berlin (2013)'), ('REFSTR', "{u'bibunstructured': u'Altenbach, H., B\\xeersan, M., Eremeyev, V.A.: Cosserat-type rods. In: Altenbach, H., Eremeyev, V.A. (eds.) Generalized Continua from the Theory to Engineering Applications, pp. 179\\u2013248. Springer, Berlin (2013)', u'bibchapter': {u'bibauthorname': [{u'familyname': u'Altenbach', u'initials': u'Holm'}, {u'familyname': u'B\\xeersan', u'initials': u'Mircea'}, {u'familyname': u'Eremeyev', u'initials': u'Victor A.'}], u'publisherlocation': u'Vienna', u'occurrence': {u'handle': u'10.1007/978-3-7091-1371-4_4', u'@type': u'DOI'}, u'booktitle': u'Generalized Continua from the Theory to Engineering Applications', u'firstpage': u'179', u'lastpage': u'248', u'year': u'2013', u'publishername': u'Springer Vienna', u'chaptertitle': {u'#text': u'Cosserat-Type Rods', u'@language': u'--'}}, u'citationnumber': u'46.', u'@id': u'CR46'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Spagnuolo'), ('AUTHOR_FIRST_NAME', u'U'), ('AUTHOR_LAST_NAME', u'Andreaus'), ('TITLE', u'A'), ('TITLE', u'targeted'), ('TITLE', u'review'), ('TITLE', u'on'), ('TITLE', u'large'), ('TITLE', u'deformations'), ('TITLE', u'of'), ('TITLE', u'planar'), ('TITLE', u'elastic'), ('TITLE', u'beams:'), ('TITLE', u'extensibility,'), ('TITLE', u'distributed'), ('TITLE', u'loads,'), ('TITLE', u'buckling'), ('TITLE', u'and'), ('TITLE', u'post-'), ('TITLE', u'buckling'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Solids'), ('VOLUME', u'24'), ('YEAR', u'2018'), ('PAGE', u'258'), ('DOI', u'10.1177/1081286517737000'), ('REFPLAINTEXT', u'Spagnuolo, M., Andreaus, U.: A targeted review on large deformations of planar elastic beams: extensibility, distributed loads, buckling and post-buckling. Math. Mech. Solids 24, 258\u2013280 (2018)'), ('REFSTR', "{u'bibunstructured': u'Spagnuolo, M., Andreaus, U.: A targeted review on large deformations of planar elastic beams: extensibility, distributed loads, buckling and post-buckling. Math. Mech. Solids 24, 258\\u2013280 (2018)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Spagnuolo', u'initials': u'M'}, {u'familyname': u'Andreaus', u'initials': u'U'}], u'occurrence': [{u'handle': u'3894506', u'@type': u'AMSID'}, {u'handle': u'10.1177/1081286517737000', u'@type': u'DOI'}], u'journaltitle': u'Math. Mech. Solids', u'volumeid': u'24', u'firstpage': u'258', u'lastpage': u'280', u'year': u'2018', u'articletitle': {u'#text': u'A targeted review on large deformations of planar elastic beams: extensibility, distributed loads, buckling and post-buckling', u'@language': u'En'}}, u'citationnumber': u'47.', u'@id': u'CR47'}")],
[('AUTHOR_FIRST_NAME', u'B'), ('AUTHOR_LAST_NAME', u'Abali'), ('AUTHOR_FIRST_NAME', u'W'), ('AUTHOR_LAST_NAME', u'Mller'), ('AUTHOR_FIRST_NAME', u'V'), ('AUTHOR_LAST_NAME', u'Eremeyev'), ('TITLE', u'Strain'), ('TITLE', u'gradient'), ('TITLE', u'elasticity'), ('TITLE', u'with'), ('TITLE', u'geometric'), ('TITLE', u'nonlinearities'), ('TITLE', u'and'), ('TITLE', u'its'), ('TITLE', u'computational'), ('TITLE', u'evaluation'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Adv.'), ('JOURNAL', u'Mater.'), ('JOURNAL', u'Mod.'), ('JOURNAL', u'Process.'), ('VOLUME', u'1'), ('YEAR', u'2015'), ('PAGE', u'4'), ('REFPLAINTEXT', u'Abali, B., M\xfcller, W., Eremeyev, V.: Strain gradient elasticity with geometric nonlinearities and its computational evaluation. Mech. Adv. Mater. Mod. Process. 1, 4 (2015)'), ('REFSTR', "{u'bibunstructured': u'Abali, B., M\\xfcller, W., Eremeyev, V.: Strain gradient elasticity with geometric nonlinearities and its computational evaluation. Mech. Adv. Mater. Mod. Process. 1, 4 (2015)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Abali', u'initials': u'B'}, {u'familyname': u'M\\xfcller', u'initials': u'W'}, {u'familyname': u'Eremeyev', u'initials': u'V'}], u'occurrence': {u'handle': u'10.1186/s40759-015-0004-3', u'@type': u'DOI'}, u'journaltitle': u'Mech. Adv. Mater. Mod. Process.', u'volumeid': u'1', u'firstpage': u'4', u'year': u'2015', u'articletitle': {u'#text': u'Strain gradient elasticity with geometric nonlinearities and its computational evaluation', u'@language': u'En'}}, u'citationnumber': u'48.', u'@id': u'CR48'}")],
[('AUTHOR_FIRST_NAME', u'BE'), ('AUTHOR_LAST_NAME', u'Abali'), ('AUTHOR_FIRST_NAME', u'WH'), ('AUTHOR_LAST_NAME', u'Mller'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'dellIsola'), ('TITLE', u'Theory'), ('TITLE', u'and'), ('TITLE', u'computation'), ('TITLE', u'of'), ('TITLE', u'higher'), ('TITLE', u'gradient'), ('TITLE', u'elasticity'), ('TITLE', u'theories'), ('TITLE', u'based'), ('TITLE', u'on'), ('TITLE', u'action'), ('TITLE', u'principles'), ('JOURNAL', u'Arch.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Mech.'), ('VOLUME', u'87'), ('YEAR', u'2017'), ('PAGE', u'1495'), ('REFPLAINTEXT', u'Abali, B.E., M\xfcller, W.H., dell\u2019Isola, F.: Theory and computation of higher gradient elasticity theories based on action principles. Arch. Appl. Mech. 87, 1495\u20131510 (2017)'), ('REFSTR', "{u'bibunstructured': u'Abali, B.E., M\\xfcller, W.H., dell\\u2019Isola, F.: Theory and computation of higher gradient elasticity theories based on action principles. Arch. Appl. Mech. 87, 1495\\u20131510 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Abali', u'initials': u'BE'}, {u'familyname': u'M\\xfcller', u'initials': u'WH'}, {u'familyname': u'dell\\u2019Isola', u'initials': u'F'}], u'occurrence': {u'handle': u'10.1007/s00419-017-1266-5', u'@type': u'DOI'}, u'journaltitle': u'Arch. Appl. Mech.', u'volumeid': u'87', u'firstpage': u'1495', u'lastpage': u'1510', u'year': u'2017', u'articletitle': {u'#text': u'Theory and computation of higher gradient elasticity theories based on action principles', u'@language': u'En'}}, u'citationnumber': u'49.', u'@id': u'CR49'}")],
[('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Riks'), ('TITLE', u'An'), ('TITLE', u'incremental'), ('TITLE', u'approach'), ('TITLE', u'to'), ('TITLE', u'the'), ('TITLE', u'solution'), ('TITLE', u'of'), ('TITLE', u'snapping'), ('TITLE', u'and'), ('TITLE', u'buckling'), ('TITLE', u'problems'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Solids'), ('JOURNAL', u'Struct.'), ('VOLUME', u'15'), ('YEAR', u'1979'), ('PAGE', u'529'), ('DOI', u'10.1016/0020-7683(79)90081-7'), ('REFPLAINTEXT', u'Riks, E.: An incremental approach to the solution of snapping and buckling problems. Int. J. Solids Struct. 15, 529\u2013551 (1979)'), ('REFSTR', "{u'bibunstructured': u'Riks, E.: An incremental approach to the solution of snapping and buckling problems. Int. J. Solids Struct. 15, 529\\u2013551 (1979)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Riks', u'initials': u'E'}, u'occurrence': [{u'handle': u'537646', u'@type': u'AMSID'}, {u'handle': u'10.1016/0020-7683(79)90081-7', u'@type': u'DOI'}], u'journaltitle': u'Int. J. Solids Struct.', u'volumeid': u'15', u'firstpage': u'529', u'lastpage': u'551', u'year': u'1979', u'articletitle': {u'#text': u'An incremental approach to the solution of snapping and buckling problems', u'@language': u'En'}}, u'citationnumber': u'50.', u'@id': u'CR50'}")],
[('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'Alouges'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Soyeur'), ('TITLE', u'On'), ('TITLE', u'global'), ('TITLE', u'weak'), ('TITLE', u'solutions'), ('TITLE', u'for'), ('TITLE', u'LandauLifshitz'), ('TITLE', u'equations:'), ('TITLE', u'existence'), ('TITLE', u'and'), ('TITLE', u'nonuniqueness'), ('JOURNAL', u'Nonlinear'), ('JOURNAL', u'Anal.'), ('JOURNAL', u'TMA'), ('VOLUME', u'18'), ('ISSUE', u'11'), ('YEAR', u'1992'), ('PAGE', u'1071'), ('DOI', u'10.1016/0362-546X(92)90196-L'), ('REFPLAINTEXT', u'Alouges, F., Soyeur, A.: On global weak solutions for Landau\u2013Lifshitz equations: existence and nonuniqueness. Nonlinear Anal. TMA 18(11), 1071\u20131084 (1992)'), ('REFSTR', "{u'bibunstructured': u'Alouges, F., Soyeur, A.: On global weak solutions for Landau\\u2013Lifshitz equations: existence and nonuniqueness. Nonlinear Anal. TMA 18(11), 1071\\u20131084 (1992)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Alouges', u'initials': u'F'}, {u'familyname': u'Soyeur', u'initials': u'A'}], u'issueid': u'11', u'journaltitle': u'Nonlinear Anal. TMA', u'volumeid': u'18', u'firstpage': u'1071', u'lastpage': u'1084', u'year': u'1992', u'articletitle': {u'#text': u'On global weak solutions for Landau\\u2013Lifshitz equations: existence and nonuniqueness', u'@outputmedium': u'All', u'@language': u'En'}, u'occurrence': [{u'handle': u'1167422', u'@type': u'AMSID'}, {u'handle': u'10.1016/0362-546X(92)90196-L', u'@type': u'DOI'}]}, u'citationnumber': u'1.', u'@id': u'CR1'}")],
[('AUTHOR_FIRST_NAME', u'I'), ('AUTHOR_LAST_NAME', u'Bejenaru'), ('AUTHOR_FIRST_NAME', u'AD'), ('AUTHOR_LAST_NAME', u'Ionescu'), ('AUTHOR_FIRST_NAME', u'CE'), ('AUTHOR_LAST_NAME', u'Kenig'), ('AUTHOR_FIRST_NAME', u'D'), ('AUTHOR_LAST_NAME', u'Tataru'), ('TITLE', u'Global'), ('TITLE', u'Schrdinger'), ('TITLE', u'maps'), ('TITLE', u'in'), ('TITLE', u'dimensions'), ('TITLE', u'd\\ge'), ('TITLE', u'2:'), ('TITLE', u'small'), ('TITLE', u'data'), ('TITLE', u'in'), ('TITLE', u'the'), ('TITLE', u'critical'), ('TITLE', u'Sobolev'), ('TITLE', u'spaces'), ('JOURNAL', u'Ann.'), ('JOURNAL', u'Math.'), ('VOLUME', u'173'), ('YEAR', u'2011'), ('PAGE', u'1443'), ('DOI', u'10.4007/annals.2011.173.3.5'), ('REFPLAINTEXT', u'Bejenaru, I., Ionescu, A.D., Kenig, C.E., Tataru, D.: Global Schr\xf6dinger maps in dimensions d\\ge 2: small data in the critical Sobolev spaces. Ann. Math. 173, 1443\u20131506 (2011)'), ('REFSTR', "{u'bibunstructured': u'Bejenaru, I., Ionescu, A.D., Kenig, C.E., Tataru, D.: Global Schr\\xf6dinger maps in dimensions d\\\\ge 2: small data in the critical Sobolev spaces. Ann. Math. 173, 1443\\u20131506 (2011)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Bejenaru', u'initials': u'I'}, {u'familyname': u'Ionescu', u'initials': u'AD'}, {u'familyname': u'Kenig', u'initials': u'CE'}, {u'familyname': u'Tataru', u'initials': u'D'}], u'occurrence': [{u'handle': u'2800718', u'@type': u'AMSID'}, {u'handle': u'10.4007/annals.2011.173.3.5', u'@type': u'DOI'}], u'journaltitle': u'Ann. Math.', u'volumeid': u'173', u'firstpage': u'1443', u'lastpage': u'1506', u'year': u'2011', u'articletitle': {u'#text': u'Global Schr\\xf6dinger maps in dimensions d\\\\ge 2: small data in the critical Sobolev spaces', u'@language': u'En'}}, u'citationnumber': u'2.', u'@id': u'CR2'}")],
[('AUTHOR_FIRST_NAME', u'Earl A'), ('AUTHOR_LAST_NAME', u'Coddington'), ('YEAR', u'1955'), ('PUBLISHER', u'Levinson'), ('PUBLISHER', u'Norman:'), ('PUBLISHER', u'Theory'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Ordinary'), ('PUBLISHER', u'Differential'), ('PUBLISHER', u'Equations'), ('REFPLAINTEXT', u'Coddington, Earl A.: Levinson Norman: Theory of Ordinary Differential Equations. McGraw-Hill Book Company Inc, New York (1955)'), ('REFSTR', "{u'bibunstructured': u'Coddington, Earl A.: Levinson Norman: Theory of Ordinary Differential Equations. McGraw-Hill Book Company Inc, New York (1955)', u'citationnumber': u'3.', u'@id': u'CR3', u'bibbook': {u'publisherlocation': u'New York', u'bibauthorname': {u'familyname': u'Coddington', u'initials': u'Earl A'}, u'publishername': u'McGraw-Hill Book Company Inc', u'booktitle': u'Levinson Norman: Theory of Ordinary Differential Equations', u'year': u'1955'}}")],
[('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Ding'), ('AUTHOR_FIRST_NAME', u'B'), ('AUTHOR_LAST_NAME', u'Guo'), ('TITLE', u'Hausdorff'), ('TITLE', u'measure'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'singular'), ('TITLE', u'set'), ('TITLE', u'of'), ('TITLE', u'LandauLifshitz'), ('TITLE', u'equations'), ('TITLE', u'with'), ('TITLE', u'a'), ('TITLE', u'nonlocal'), ('TITLE', u'term'), ('JOURNAL', u'Commun.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'250'), ('ISSUE', u'1'), ('YEAR', u'2004'), ('PAGE', u'95'), ('DOI', u'10.1007/s00220-004-1120-9'), ('REFPLAINTEXT', u'Ding, S., Guo, B.: Hausdorff measure of the singular set of Landau\u2013Lifshitz equations with a nonlocal term. Commun. Math. Phys. 250(1), 95\u2013117 (2004)'), ('REFSTR', "{u'bibunstructured': u'Ding, S., Guo, B.: Hausdorff measure of the singular set of Landau\\u2013Lifshitz equations with a nonlocal term. Commun. Math. Phys. 250(1), 95\\u2013117 (2004)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Ding', u'initials': u'S'}, {u'familyname': u'Guo', u'initials': u'B'}], u'issueid': u'1', u'journaltitle': u'Commun. Math. Phys.', u'volumeid': u'250', u'firstpage': u'95', u'lastpage': u'117', u'year': u'2004', u'articletitle': {u'#text': u'Hausdorff measure of the singular set of Landau\\u2013Lifshitz equations with a nonlocal term', u'@language': u'En'}, u'occurrence': [{u'handle': u'2092031', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00220-004-1120-9', u'@type': u'DOI'}]}, u'citationnumber': u'4.', u'@id': u'CR4'}")],
[('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Ding'), ('AUTHOR_FIRST_NAME', u'B'), ('AUTHOR_LAST_NAME', u'Guo'), ('TITLE', u'Existence'), ('TITLE', u'of'), ('TITLE', u'partially'), ('TITLE', u'regular'), ('TITLE', u'weak'), ('TITLE', u'solutions'), ('TITLE', u'to'), ('TITLE', u'LandauLifshitzMaxwell'), ('TITLE', u'equations'), ('JOURNAL', u'J.'), ('JOURNAL', u'Differ.'), ('JOURNAL', u'Equ.'), ('VOLUME', u'244'), ('YEAR', u'2008'), ('PAGE', u'2448'), ('DOI', u'10.1016/j.jde.2008.02.029'), ('REFPLAINTEXT', u'Ding, S., Guo, B.: Existence of partially regular weak solutions to Landau\u2013Lifshitz\u2013Maxwell equations. J. Differ. Equ. 244, 2448\u20132472 (2008)'), ('REFSTR', "{u'bibunstructured': u'Ding, S., Guo, B.: Existence of partially regular weak solutions to Landau\\u2013Lifshitz\\u2013Maxwell equations. J. Differ. Equ. 244, 2448\\u20132472 (2008)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Ding', u'initials': u'S'}, {u'familyname': u'Guo', u'initials': u'B'}], u'occurrence': [{u'handle': u'2414401', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.jde.2008.02.029', u'@type': u'DOI'}], u'journaltitle': u'J. Differ. Equ.', u'volumeid': u'244', u'firstpage': u'2448', u'lastpage': u'2472', u'year': u'2008', u'articletitle': {u'#text': u'Existence of partially regular weak solutions to Landau\\u2013Lifshitz\\u2013Maxwell equations', u'@language': u'En'}}, u'citationnumber': u'5.', u'@id': u'CR5'}")],
[('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Ding'), ('AUTHOR_FIRST_NAME', u'X'), ('AUTHOR_LAST_NAME', u'Liu'), ('AUTHOR_FIRST_NAME', u'C'), ('AUTHOR_LAST_NAME', u'Wang'), ('TITLE', u'The'), ('TITLE', u'LandauLifshitzMaxwell'), ('TITLE', u'equation'), ('TITLE', u'in'), ('TITLE', u'dimension'), ('TITLE', u'three'), ('JOURNAL', u'Pac.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Math.'), ('VOLUME', u'243'), ('ISSUE', u'2'), ('YEAR', u'2009'), ('PAGE', u'243'), ('DOI', u'10.2140/pjm.2009.243.243'), ('REFPLAINTEXT', u'Ding, S., Liu, X., Wang, C.: The Landau\u2013Lifshitz\u2013Maxwell equation in dimension three. Pac. J. Math. 243(2), 243\u2013276 (2009)'), ('REFSTR', "{u'bibunstructured': u'Ding, S., Liu, X., Wang, C.: The Landau\\u2013Lifshitz\\u2013Maxwell equation in dimension three. Pac. J. Math. 243(2), 243\\u2013276 (2009)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Ding', u'initials': u'S'}, {u'familyname': u'Liu', u'initials': u'X'}, {u'familyname': u'Wang', u'initials': u'C'}], u'issueid': u'2', u'journaltitle': u'Pac. J. Math.', u'volumeid': u'243', u'firstpage': u'243', u'lastpage': u'276', u'year': u'2009', u'articletitle': {u'#text': u'The Landau\\u2013Lifshitz\\u2013Maxwell equation in dimension three', u'@language': u'En'}, u'occurrence': [{u'handle': u'2552258', u'@type': u'AMSID'}, {u'handle': u'10.2140/pjm.2009.243.243', u'@type': u'DOI'}]}, u'citationnumber': u'6.', u'@id': u'CR6'}")],
[('AUTHOR_FIRST_NAME', u'L'), ('AUTHOR_LAST_NAME', u'Evans'), ('YEAR', u'1998'), ('PUBLISHER', u'Partial'), ('PUBLISHER', u'Differential'), ('PUBLISHER', u'Equations'), ('REFPLAINTEXT', u'Evans, L.: Partial Differential Equations. American Mathematical Society, Providence (1998)'), ('REFSTR', "{u'bibunstructured': u'Evans, L.: Partial Differential Equations. American Mathematical Society, Providence (1998)', u'citationnumber': u'7.', u'@id': u'CR7', u'bibbook': {u'bibauthorname': {u'familyname': u'Evans', u'initials': u'L'}, u'publisherlocation': u'Providence', u'occurrence': {u'handle': u'0902.35002', u'@type': u'ZLBID'}, u'booktitle': u'Partial Differential Equations', u'year': u'1998', u'publishername': u'American Mathematical Society'}}")],
[('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Gilbert'), ('TITLE', u'A'), ('TITLE', u'Lagrangian'), ('TITLE', u'formulation'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'gyromagnetic'), ('TITLE', u'equation'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'magnetization'), ('TITLE', u'field'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('VOLUME', u'100'), ('ISSUE', u'52'), ('YEAR', u'1955'), ('PAGE', u'1243'), ('REFPLAINTEXT', u'Gilbert, T.: A Lagrangian formulation of the gyromagnetic equation of the magnetization field. Phys. Rev. 100(52), 1243 (1955)'), ('REFSTR', "{u'bibunstructured': u'Gilbert, T.: A Lagrangian formulation of the gyromagnetic equation of the magnetization field. Phys. Rev. 100(52), 1243 (1955)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Gilbert', u'initials': u'T'}, u'issueid': u'52', u'journaltitle': u'Phys. Rev.', u'volumeid': u'100', u'firstpage': u'1243', u'year': u'1955', u'articletitle': {u'#text': u'A Lagrangian formulation of the gyromagnetic equation of the magnetization field', u'@language': u'En'}}, u'citationnumber': u'8.', u'@id': u'CR8'}")],
[('AUTHOR_FIRST_NAME', u'C'), ('AUTHOR_LAST_NAME', u'Garca-Cervera'), ('AUTHOR_FIRST_NAME', u'X'), ('AUTHOR_LAST_NAME', u'Wang'), ('TITLE', u'Spin-'), ('TITLE', u'Polarized'), ('TITLE', u'transport:'), ('TITLE', u'existence'), ('TITLE', u'of'), ('TITLE', u'weak'), ('TITLE', u'solutions'), ('JOURNAL', u'Discrete'), ('JOURNAL', u'Contin.'), ('JOURNAL', u'Dyn.'), ('JOURNAL', u'Syst.'), ('JOURNAL', u'Ser.'), ('JOURNAL', u'B'), ('VOLUME', u'7'), ('ISSUE', u'1'), ('YEAR', u'2007'), ('PAGE', u'87'), ('DOI', u'10.3934/dcdsb.2007.7.87'), ('REFPLAINTEXT', u'Garc\xeda-Cervera, C., Wang, X.: Spin-Polarized transport: existence of weak solutions. Discrete Contin. Dyn. Syst. Ser. B 7(1), 87\u2013100 (2007)'), ('REFSTR', "{u'bibunstructured': u'Garc\\xeda-Cervera, C., Wang, X.: Spin-Polarized transport: existence of weak solutions. Discrete Contin. Dyn. Syst. Ser. B 7(1), 87\\u2013100 (2007)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Garc\\xeda-Cervera', u'initials': u'C'}, {u'familyname': u'Wang', u'initials': u'X'}], u'issueid': u'1', u'journaltitle': u'Discrete Contin. Dyn. Syst. Ser. B', u'volumeid': u'7', u'firstpage': u'87', u'lastpage': u'100', u'year': u'2007', u'articletitle': {u'#text': u'Spin-Polarized transport: existence of weak solutions', u'@language': u'En'}, u'occurrence': [{u'handle': u'2257453', u'@type': u'AMSID'}, {u'handle': u'10.3934/dcdsb.2007.7.87', u'@type': u'DOI'}]}, u'citationnumber': u'9.', u'@id': u'CR9'}")],
[('AUTHOR_FIRST_NAME', u'B'), ('AUTHOR_LAST_NAME', u'Guo'), ('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Ding'), ('YEAR', u'2008'), ('PUBLISHER', u'Landau\u2013Lifshitz'), ('PUBLISHER', u'Equations'), ('REFPLAINTEXT', u'Guo, B., Ding, S.: Landau\u2013Lifshitz Equations. World Scientific Press, Singapore (2008)'), ('REFSTR', "{u'bibunstructured': u'Guo, B., Ding, S.: Landau\\u2013Lifshitz Equations. World Scientific Press, Singapore (2008)', u'citationnumber': u'10.', u'@id': u'CR10', u'bibbook': {u'bibauthorname': [{u'familyname': u'Guo', u'initials': u'B'}, {u'familyname': u'Ding', u'initials': u'S'}], u'publisherlocation': u'Singapore', u'occurrence': {u'handle': u'10.1142/6658', u'@type': u'DOI'}, u'booktitle': u'Landau\\u2013Lifshitz Equations', u'year': u'2008', u'publishername': u'World Scientific Press'}}")],
[('AUTHOR_FIRST_NAME', u'B'), ('AUTHOR_LAST_NAME', u'Guo'), ('AUTHOR_FIRST_NAME', u'X'), ('AUTHOR_LAST_NAME', u'Pu'), ('TITLE', u'Global'), ('TITLE', u'smooth'), ('TITLE', u'solutions'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'spin'), ('TITLE', u'polarized'), ('TITLE', u'transport'), ('TITLE', u'equation'), ('JOURNAL', u'Electron.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Differ.'), ('JOURNAL', u'Equ.'), ('VOLUME', u'63'), ('YEAR', u'2008'), ('PAGE', u'359'), ('REFPLAINTEXT', u'Guo, B., Pu, X.: Global smooth solutions of the spin polarized transport equation. Electron. J. Differ. Equ. 63, 359\u2013370 (2008)'), ('REFSTR', "{u'bibunstructured': u'Guo, B., Pu, X.: Global smooth solutions of the spin polarized transport equation. Electron. J. Differ. Equ. 63, 359\\u2013370 (2008)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Guo', u'initials': u'B'}, {u'familyname': u'Pu', u'initials': u'X'}], u'occurrence': [{u'handle': u'2411058', u'@type': u'AMSID'}, {u'handle': u'1170.35306', u'@type': u'ZLBID'}], u'journaltitle': u'Electron. J. Differ. Equ.', u'volumeid': u'63', u'firstpage': u'359', u'lastpage': u'370', u'year': u'2008', u'articletitle': {u'#text': u'Global smooth solutions of the spin polarized transport equation', u'@language': u'En'}}, u'citationnumber': u'11.', u'@id': u'CR11'}")],
[('AUTHOR_FIRST_NAME', u'B'), ('AUTHOR_LAST_NAME', u'Guo'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'Su'), ('TITLE', u'Global'), ('TITLE', u'weak'), ('TITLE', u'solution'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'LandauLifshitzMaxwell'), ('TITLE', u'equation'), ('TITLE', u'in'), ('TITLE', u'three'), ('TITLE', u'space'), ('TITLE', u'dimensions'), ('JOURNAL', u'J.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Anal.'), ('JOURNAL', u'Appl.'), ('VOLUME', u'211'), ('YEAR', u'1997'), ('PAGE', u'326'), ('DOI', u'10.1006/jmaa.1997.5467'), ('REFPLAINTEXT', u'Guo, B., Su, F.: Global weak solution for the Landau\u2013Lifshitz\u2013Maxwell equation in three space dimensions. J. Math. Anal. Appl. 211, 326\u2013346 (1997)'), ('REFSTR', "{u'bibunstructured': u'Guo, B., Su, F.: Global weak solution for the Landau\\u2013Lifshitz\\u2013Maxwell equation in three space dimensions. J. Math. Anal. Appl. 211, 326\\u2013346 (1997)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Guo', u'initials': u'B'}, {u'familyname': u'Su', u'initials': u'F'}], u'occurrence': [{u'handle': u'1460175', u'@type': u'AMSID'}, {u'handle': u'10.1006/jmaa.1997.5467', u'@type': u'DOI'}], u'journaltitle': u'J. Math. Anal. Appl.', u'volumeid': u'211', u'firstpage': u'326', u'lastpage': u'346', u'year': u'1997', u'articletitle': {u'#text': u'Global weak solution for the Landau\\u2013Lifshitz\\u2013Maxwell equation in three space dimensions', u'@language': u'En'}}, u'citationnumber': u'12.', u'@id': u'CR12'}")],
[('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'Jochmann'), ('TITLE', u'Existence'), ('TITLE', u'of'), ('TITLE', u'weak'), ('TITLE', u'solutions'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'drift'), ('TITLE', u'diffusion'), ('TITLE', u'model'), ('TITLE', u'coupled'), ('TITLE', u'with'), ('TITLE', u'Maxwells'), ('TITLE', u'equations'), ('JOURNAL', u'J.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Anal.'), ('JOURNAL', u'Appl.'), ('VOLUME', u'204'), ('YEAR', u'1996'), ('PAGE', u'655'), ('DOI', u'10.1006/jmaa.1996.0460'), ('REFPLAINTEXT', u'Jochmann, F.: Existence of weak solutions of the drift diffusion model coupled with Maxwell\u2019s equations. J. Math. Anal. Appl. 204, 655\u2013676 (1996)'), ('REFSTR', "{u'bibunstructured': u'Jochmann, F.: Existence of weak solutions of the drift diffusion model coupled with Maxwell\\u2019s equations. J. Math. Anal. Appl. 204, 655\\u2013676 (1996)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Jochmann', u'initials': u'F'}, u'occurrence': [{u'handle': u'1422765', u'@type': u'AMSID'}, {u'handle': u'10.1006/jmaa.1996.0460', u'@type': u'DOI'}], u'journaltitle': u'J. Math. Anal. Appl.', u'volumeid': u'204', u'firstpage': u'655', u'lastpage': u'676', u'year': u'1996', u'articletitle': {u'#text': u'Existence of weak solutions of the drift diffusion model coupled with Maxwell\\u2019s equations', u'@language': u'En'}}, u'citationnumber': u'13.', u'@id': u'CR13'}")],
[('AUTHOR_FIRST_NAME', u'Tosio'), ('AUTHOR_LAST_NAME', u'Kato'), ('YEAR', u'1975'), ('PAGE', u'25'), ('PUBLISHER', u'Lecture'), ('PUBLISHER', u'Notes'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Mathematics'), ('REFPLAINTEXT', u'Kato, T.: Quasi-linear equations of evolution, with applications to partial differential equations. In: Spectral Theory and Differential Equations. Lecture Notes in Math., vol. 448, pp. 25\u201370. Springer, Berlin (1975)'), ('REFSTR', "{u'bibunstructured': u'Kato, T.: Quasi-linear equations of evolution, with applications to partial differential equations. In: Spectral Theory and Differential Equations. Lecture Notes in Math., vol. 448, pp. 25\\u201370. Springer, Berlin (1975)', u'bibchapter': {u'bibauthorname': {u'familyname': u'Kato', u'initials': u'Tosio'}, u'publisherlocation': u'Berlin, Heidelberg', u'booktitle': u'Lecture Notes in Mathematics', u'firstpage': u'25', u'lastpage': u'70', u'year': u'1975', u'publishername': u'Springer Berlin Heidelberg', u'chaptertitle': {u'#text': u'Quasi-linear equations of evolution, with applications to partial differential equations', u'@language': u'--'}}, u'citationnumber': u'14.', u'@id': u'CR14'}")],
[('AUTHOR_FIRST_NAME', u'L'), ('AUTHOR_LAST_NAME', u'Landau'), ('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Lifshitz'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'theory'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'dispersion'), ('TITLE', u'of'), ('TITLE', u'magnetic'), ('TITLE', u'permeability'), ('TITLE', u'in'), ('TITLE', u'ferromagnetic'), ('TITLE', u'bodies'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Z.'), ('JOURNAL', u'der'), ('JOURNAL', u'Sowjetunion'), ('VOLUME', u'8'), ('YEAR', u'1935'), ('PAGE', u'153'), ('REFPLAINTEXT', u'Landau, L., Lifshitz, E.: On the theory of the dispersion of magnetic permeability in ferromagnetic bodies. Phys. Z. der Sowjetunion 8, 153\u2013169 (1935)'), ('REFSTR', "{u'bibunstructured': u'Landau, L., Lifshitz, E.: On the theory of the dispersion of magnetic permeability in ferromagnetic bodies. Phys. Z. der Sowjetunion 8, 153\\u2013169 (1935)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Landau', u'initials': u'L'}, {u'familyname': u'Lifshitz', u'initials': u'E'}], u'occurrence': {u'handle': u'0012.28501', u'@type': u'ZLBID'}, u'journaltitle': u'Phys. Z. der Sowjetunion', u'volumeid': u'8', u'firstpage': u'153', u'lastpage': u'169', u'year': u'1935', u'articletitle': {u'#text': u'On the theory of the dispersion of magnetic permeability in ferromagnetic bodies', u'@language': u'En'}}, u'citationnumber': u'15.', u'@id': u'CR15'}")],
[('REFPLAINTEXT', u'Moser R.: Partial regularity for the Landau\u2013Lifshitz equation in small dimensions, vol. 26. MPI Preprint (2002)'), ('REFSTR', "{u'bibunstructured': u'Moser R.: Partial regularity for the Landau\\u2013Lifshitz equation in small dimensions, vol. 26. MPI Preprint (2002)', u'citationnumber': u'16.', u'@id': u'CR16'}")],
[('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Pazy'), ('YEAR', u'1983'), ('PUBLISHER', u'Semigroups'), ('PUBLISHER', u'of'), ('PUBLISHER', u'linear'), ('PUBLISHER', u'operators'), ('PUBLISHER', u'and'), ('PUBLISHER', u'applications'), ('PUBLISHER', u'to'), ('PUBLISHER', u'partial'), ('PUBLISHER', u'differential'), ('PUBLISHER', u'equations'), ('REFPLAINTEXT', u'Pazy, A.: Semigroups of linear operators and applications to partial differential equations. Springer, Berlin (1983)'), ('REFSTR', "{u'bibunstructured': u'Pazy, A.: Semigroups of linear operators and applications to partial differential equations. Springer, Berlin (1983)', u'citationnumber': u'17.', u'@id': u'CR17', u'bibbook': {u'bibauthorname': {u'familyname': u'Pazy', u'initials': u'A'}, u'publisherlocation': u'Berlin', u'occurrence': {u'handle': u'10.1007/978-1-4612-5561-1', u'@type': u'DOI'}, u'booktitle': u'Semigroups of linear operators and applications to partial differential equations', u'year': u'1983', u'publishername': u'Springer'}}")],
[('AUTHOR_FIRST_NAME', u'X'), ('AUTHOR_LAST_NAME', u'Pu'), ('AUTHOR_FIRST_NAME', u'B'), ('AUTHOR_LAST_NAME', u'Guo'), ('TITLE', u'Global'), ('TITLE', u'smooth'), ('TITLE', u'solutions'), ('TITLE', u'for'), ('TITLE', u'the'), ('TITLE', u'one-'), ('TITLE', u'dimensional'), ('TITLE', u'spin-'), ('TITLE', u'polarized'), ('TITLE', u'transport'), ('TITLE', u'equation'), ('JOURNAL', u'Nonlinear'), ('JOURNAL', u'Anal.'), ('VOLUME', u'72'), ('YEAR', u'2010'), ('PAGE', u'1481'), ('DOI', u'10.1016/j.na.2009.08.032'), ('REFPLAINTEXT', u'Pu, X., Guo, B.: Global smooth solutions for the one-dimensional spin-polarized transport equation. Nonlinear Anal. 72, 1481\u20131487 (2010)'), ('REFSTR', "{u'bibunstructured': u'Pu, X., Guo, B.: Global smooth solutions for the one-dimensional spin-polarized transport equation. Nonlinear Anal. 72, 1481\\u20131487 (2010)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Pu', u'initials': u'X'}, {u'familyname': u'Guo', u'initials': u'B'}], u'occurrence': [{u'handle': u'2577549', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.na.2009.08.032', u'@type': u'DOI'}], u'journaltitle': u'Nonlinear Anal.', u'volumeid': u'72', u'firstpage': u'1481', u'lastpage': u'1487', u'year': u'2010', u'articletitle': {u'#text': u'Global smooth solutions for the one-dimensional spin-polarized transport equation', u'@language': u'En'}}, u'citationnumber': u'18.', u'@id': u'CR18'}")],
[('ARXIV', '1808.01798'), ('REFPLAINTEXT', u'Pu, X., Wang, W.: Partial regularity to the Landau\u2013Lifshitz equation with spin accumulation.'), ('REFSTR', "{u'bibunstructured': {u'#text': u'Pu, X., Wang, W.: Partial regularity to the Landau\\u2013Lifshitz equation with spin accumulation.', u'externalref': {u'refsource': u'arXiv:1808.01798', u'reftarget': {u'@address': u'http://arxiv.org/abs/1808.01798', u'@targettype': u'URL'}}}, u'citationnumber': u'19.', u'@id': u'CR19'}")],
[('AUTHOR_FIRST_NAME', u'X'), ('AUTHOR_LAST_NAME', u'Pu'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Wang'), ('AUTHOR_FIRST_NAME', u'W'), ('AUTHOR_LAST_NAME', u'Wang'), ('TITLE', u'The'), ('TITLE', u'LandauLifshitz'), ('TITLE', u'equation'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'ferromagnetic'), ('TITLE', u'spin'), ('TITLE', u'chain'), ('TITLE', u'and'), ('TITLE', u'OseenFrank'), ('TITLE', u'flow'), ('JOURNAL', u'SIAM'), ('JOURNAL', u'J.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Anal.'), ('VOLUME', u'49'), ('ISSUE', u'6'), ('YEAR', u'2017'), ('PAGE', u'5134'), ('DOI', u'10.1137/16M1094907'), ('REFPLAINTEXT', u'Pu, X., Wang, M., Wang, W.: The Landau\u2013Lifshitz equation of the ferromagnetic spin chain and Oseen\u2013Frank flow. SIAM J. Math. Anal. 49(6), 5134\u20135157 (2017)'), ('REFSTR', "{u'bibunstructured': u'Pu, X., Wang, M., Wang, W.: The Landau\\u2013Lifshitz equation of the ferromagnetic spin chain and Oseen\\u2013Frank flow. SIAM J. Math. Anal. 49(6), 5134\\u20135157 (2017)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Pu', u'initials': u'X'}, {u'familyname': u'Wang', u'initials': u'M'}, {u'familyname': u'Wang', u'initials': u'W'}], u'issueid': u'6', u'journaltitle': u'SIAM J. Math. Anal.', u'volumeid': u'49', u'firstpage': u'5134', u'lastpage': u'5157', u'year': u'2017', u'articletitle': {u'#text': u'The Landau\\u2013Lifshitz equation of the ferromagnetic spin chain and Oseen\\u2013Frank flow', u'@language': u'En'}, u'occurrence': [{u'handle': u'3738308', u'@type': u'AMSID'}, {u'handle': u'10.1137/16M1094907', u'@type': u'DOI'}]}, u'citationnumber': u'20.', u'@id': u'CR20'}")],
[('AUTHOR_FIRST_NAME', u'J'), ('AUTHOR_LAST_NAME', u'Simon'), ('TITLE', u'Compact'), ('TITLE', u'sets'), ('TITLE', u'in'), ('TITLE', u'the'), ('TITLE', u'space'), ('TITLE', u'L(0,'), ('TITLE', u'T;B)'), ('JOURNAL', u'Ann.'), ('JOURNAL', u'Mat.'), ('JOURNAL', u'Pura'), ('JOURNAL', u'Appl.'), ('VOLUME', u'196'), ('YEAR', u'1987'), ('PAGE', u'65'), ('REFPLAINTEXT', u'Simon, J.: Compact sets in the space L(0, T;B). Ann. Mat. Pura Appl. 196, 65\u201396 (1987)'), ('REFSTR', "{u'bibunstructured': {u'#text': u'Simon, J.: Compact sets in the space L(0, T;B). Ann. Mat. Pura Appl. 196, 65\\u201396 (1987)', u'sup': u'p'}, u'bibarticle': {u'bibauthorname': {u'familyname': u'Simon', u'initials': u'J'}, u'occurrence': {u'handle': u'0629.46031', u'@type': u'ZLBID'}, u'journaltitle': u'Ann. Mat. Pura Appl.', u'volumeid': u'196', u'firstpage': u'65', u'lastpage': u'96', u'year': u'1987', u'articletitle': {u'#text': u'Compact sets in the space L(0, T;B)', u'sup': u'p', u'@language': u'En'}}, u'citationnumber': u'21.', u'@id': u'CR21'}")],
[('AUTHOR_FIRST_NAME', u'C'), ('AUTHOR_LAST_NAME', u'Wang'), ('TITLE', u'On'), ('TITLE', u'LandauLifshitz'), ('TITLE', u'equation'), ('TITLE', u'in'), ('TITLE', u'dimensions'), ('TITLE', u'at'), ('TITLE', u'most'), ('TITLE', u'four'), ('JOURNAL', u'Indiana'), ('JOURNAL', u'Univ.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'J.'), ('VOLUME', u'55'), ('ISSUE', u'5'), ('YEAR', u'2006'), ('PAGE', u'1615'), ('DOI', u'10.1512/iumj.2006.55.2810'), ('REFPLAINTEXT', u'Wang, C.: On Landau\u2013Lifshitz equation in dimensions at most four. Indiana Univ. Math. J. 55(5), 1615\u20131644 (2006)'), ('REFSTR', "{u'bibunstructured': u'Wang, C.: On Landau\\u2013Lifshitz equation in dimensions at most four. Indiana Univ. Math. J. 55(5), 1615\\u20131644 (2006)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Wang', u'initials': u'C'}, u'issueid': u'5', u'journaltitle': u'Indiana Univ. Math. J.', u'volumeid': u'55', u'firstpage': u'1615', u'lastpage': u'1644', u'year': u'2006', u'articletitle': {u'#text': u'On Landau\\u2013Lifshitz equation in dimensions at most four', u'@language': u'En'}, u'occurrence': [{u'handle': u'2270931', u'@type': u'AMSID'}, {u'handle': u'10.1512/iumj.2006.55.2810', u'@type': u'DOI'}]}, u'citationnumber': u'22.', u'@id': u'CR22'}")],
[('AUTHOR_FIRST_NAME', u'N'), ('AUTHOR_LAST_NAME', u'Zamponi'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Jngel'), ('TITLE', u'Analysis'), ('TITLE', u'of'), ('TITLE', u'a'), ('TITLE', u'coupled'), ('TITLE', u'spin'), ('TITLE', u'driftdiffusion'), ('TITLE', u'MaxwellLandauLifshitz'), ('TITLE', u'system'), ('JOURNAL', u'J.'), ('JOURNAL', u'Differ.'), ('JOURNAL', u'Equ.'), ('VOLUME', u'260'), ('ISSUE', u'9'), ('YEAR', u'2016'), ('PAGE', u'6828'), ('DOI', u'10.1016/j.jde.2016.01.010'), ('REFPLAINTEXT', u'Zamponi, N., J\xfcngel, A.: Analysis of a coupled spin drift\u2013diffusion Maxwell\u2013Landau\u2013Lifshitz system. J. Differ. Equ. 260(9), 6828\u20136854 (2016)'), ('REFSTR', "{u'bibunstructured': u'Zamponi, N., J\\xfcngel, A.: Analysis of a coupled spin drift\\u2013diffusion Maxwell\\u2013Landau\\u2013Lifshitz system. J. Differ. Equ. 260(9), 6828\\u20136854 (2016)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Zamponi', u'initials': u'N'}, {u'familyname': u'J\\xfcngel', u'initials': u'A'}], u'issueid': u'9', u'journaltitle': u'J. Differ. Equ.', u'volumeid': u'260', u'firstpage': u'6828', u'lastpage': u'6854', u'year': u'2016', u'articletitle': {u'#text': u'Analysis of a coupled spin drift\\u2013diffusion Maxwell\\u2013Landau\\u2013Lifshitz system', u'@language': u'En'}, u'occurrence': [{u'handle': u'3461086', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.jde.2016.01.010', u'@type': u'DOI'}]}, u'citationnumber': u'23.', u'@id': u'CR23'}")],
[('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Zheng'), ('AUTHOR_FIRST_NAME', u'PM'), ('AUTHOR_LAST_NAME', u'Levy'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Fert'), ('TITLE', u'Mechanisms'), ('TITLE', u'of'), ('TITLE', u'spin-'), ('TITLE', u'polarized'), ('TITLE', u'current-'), ('TITLE', u'driven'), ('TITLE', u'magnetization'), ('TITLE', u'switching'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'Lett.'), ('VOLUME', u'88'), ('ISSUE', u'23'), ('YEAR', u'2002'), ('PAGE', u'236601'), ('REFPLAINTEXT', u'Zheng, S., Levy, P.M., Fert, A.: Mechanisms of spin-polarized current-driven magnetization switching. Phys. Rev. Lett. 88(23), 236601 (2002)'), ('REFSTR', "{u'bibunstructured': u'Zheng, S., Levy, P.M., Fert, A.: Mechanisms of spin-polarized current-driven magnetization switching. Phys. Rev. Lett. 88(23), 236601 (2002)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Zheng', u'initials': u'S'}, {u'familyname': u'Levy', u'initials': u'PM'}, {u'familyname': u'Fert', u'initials': u'A'}], u'issueid': u'23', u'journaltitle': u'Phys. Rev. Lett.', u'volumeid': u'88', u'firstpage': u'236601', u'year': u'2002', u'articletitle': {u'#text': u'Mechanisms of spin-polarized current-driven magnetization switching', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1103/PhysRevLett.88.236601', u'@type': u'DOI'}}, u'citationnumber': u'24.', u'@id': u'CR24'}")],
[('AUTHOR_FIRST_NAME', u'MJ'), ('AUTHOR_LAST_NAME', u'Ablowitz'), ('AUTHOR_FIRST_NAME', u'PA'), ('AUTHOR_LAST_NAME', u'Clarkson'), ('YEAR', u'1991'), ('PUBLISHER', u'Solitons,'), ('PUBLISHER', u'Nonlinear'), ('PUBLISHER', u'Evolution'), ('PUBLISHER', u'Equations'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Inverse'), ('PUBLISHER', u'Scattering'), ('REFPLAINTEXT', u'Ablowitz, M.J., Clarkson, P.A.: Solitons, Nonlinear Evolution Equations and Inverse Scattering. Cambridge University Press, Cambridge (1991)'), ('REFSTR', "{u'bibunstructured': u'Ablowitz, M.J., Clarkson, P.A.: Solitons, Nonlinear Evolution Equations and Inverse Scattering. Cambridge University Press, Cambridge (1991)', u'citationnumber': u'1.', u'@id': u'CR1', u'bibbook': {u'bibauthorname': [{u'familyname': u'Ablowitz', u'initials': u'MJ'}, {u'familyname': u'Clarkson', u'initials': u'PA'}], u'publisherlocation': u'Cambridge', u'occurrence': {u'handle': u'10.1017/CBO9780511623998', u'@type': u'DOI'}, u'booktitle': u'Solitons, Nonlinear Evolution Equations and Inverse Scattering', u'year': u'1991', u'publishername': u'Cambridge University Press'}}")],
[('AUTHOR_FIRST_NAME', u'JD'), ('AUTHOR_LAST_NAME', u'Achenbach'), ('YEAR', u'1973'), ('PUBLISHER', u'Wave'), ('PUBLISHER', u'Propagation'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Elastic'), ('PUBLISHER', u'Solids'), ('REFPLAINTEXT', u'Achenbach, J.D.: Wave Propagation in Elastic Solids. North Holland Publishing Company, Amsterdam (1973)'), ('REFSTR', "{u'bibunstructured': u'Achenbach, J.D.: Wave Propagation in Elastic Solids. North Holland Publishing Company, Amsterdam (1973)', u'citationnumber': u'2.', u'@id': u'CR2', u'bibbook': {u'bibauthorname': {u'familyname': u'Achenbach', u'initials': u'JD'}, u'publisherlocation': u'Amsterdam', u'occurrence': {u'handle': u'0268.73005', u'@type': u'ZLBID'}, u'booktitle': u'Wave Propagation in Elastic Solids', u'year': u'1973', u'publishername': u'North Holland Publishing Company'}}")],
[('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Ahmetolan'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Teymur'), ('TITLE', u'Non-'), ('TITLE', u'linear'), ('TITLE', u'modulation'), ('TITLE', u'of'), ('TITLE', u'SH'), ('TITLE', u'waves'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'two-'), ('TITLE', u'layered'), ('TITLE', u'plate'), ('TITLE', u'and'), ('TITLE', u'formation'), ('TITLE', u'of'), ('TITLE', u'surface'), ('TITLE', u'SH'), ('TITLE', u'waves'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Non'), ('JOURNAL', u'Linear'), ('JOURNAL', u'Mech.'), ('VOLUME', u'38'), ('YEAR', u'2003'), ('PAGE', u'1237'), ('DOI', u'10.1016/S0020-7462(02)00070-7'), ('REFPLAINTEXT', u'Ahmetolan, S., Teymur, M.: Non-linear modulation of SH waves in a two-layered plate and formation of surface SH waves. Int. J. Non Linear Mech. 38, 1237\u20131250 (2003)'), ('REFSTR', "{u'bibunstructured': u'Ahmetolan, S., Teymur, M.: Non-linear modulation of SH waves in a two-layered plate and formation of surface SH waves. Int. J. Non Linear Mech. 38, 1237\\u20131250 (2003)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Ahmetolan', u'initials': u'S'}, {u'familyname': u'Teymur', u'initials': u'M'}], u'occurrence': [{u'handle': u'1955183', u'@type': u'AMSID'}, {u'handle': u'10.1016/S0020-7462(02)00070-7', u'@type': u'DOI'}], u'journaltitle': u'Int. J. Non Linear Mech.', u'volumeid': u'38', u'firstpage': u'1237', u'lastpage': u'1250', u'year': u'2003', u'articletitle': {u'#text': u'Non-linear modulation of SH waves in a two-layered plate and formation of surface SH waves', u'@outputmedium': u'All', u'@language': u'En'}}, u'citationnumber': u'3.', u'@id': u'CR3'}")],
[('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Ahmetolan'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Teymur'), ('TITLE', u'Nonlinear'), ('TITLE', u'modulation'), ('TITLE', u'of'), ('TITLE', u'SH'), ('TITLE', u'waves'), ('TITLE', u'in'), ('TITLE', u'an'), ('TITLE', u'incompressible'), ('TITLE', u'hyperelastic'), ('TITLE', u'plate'), ('JOURNAL', u'Z.'), ('JOURNAL', u'Angew.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'58'), ('YEAR', u'2007'), ('PAGE', u'457'), ('DOI', u'10.1007/s00033-005-0056-z'), ('REFPLAINTEXT', u'Ahmetolan, S., Teymur, M.: Nonlinear modulation of SH waves in an incompressible hyperelastic plate. Z. Angew. Math. Phys. 58, 457\u2013474 (2007)'), ('REFSTR', "{u'bibunstructured': u'Ahmetolan, S., Teymur, M.: Nonlinear modulation of SH waves in an incompressible hyperelastic plate. Z. Angew. Math. Phys. 58, 457\\u2013474 (2007)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Ahmetolan', u'initials': u'S'}, {u'familyname': u'Teymur', u'initials': u'M'}], u'occurrence': [{u'handle': u'2320226', u'@type': u'AMSID'}, {u'handle': u'10.1007/s00033-005-0056-z', u'@type': u'DOI'}], u'journaltitle': u'Z. Angew. Math. Phys.', u'volumeid': u'58', u'firstpage': u'457', u'lastpage': u'474', u'year': u'2007', u'articletitle': {u'#text': u'Nonlinear modulation of SH waves in an incompressible hyperelastic plate', u'@language': u'En'}}, u'citationnumber': u'4.', u'@id': u'CR4'}")],
[('AUTHOR_FIRST_NAME', u'K'), ('AUTHOR_LAST_NAME', u'Bataille'), ('AUTHOR_FIRST_NAME', u'F'), ('AUTHOR_LAST_NAME', u'Lund'), ('TITLE', u'Nonlinear'), ('TITLE', u'waves'), ('TITLE', u'in'), ('TITLE', u'elastic'), ('TITLE', u'media'), ('JOURNAL', u'Physica'), ('JOURNAL', u'D'), ('VOLUME', u'6'), ('ISSUE', u'1'), ('YEAR', u'1982'), ('PAGE', u'95'), ('DOI', u'10.1016/0167-2789(82)90007-0'), ('REFPLAINTEXT', u'Bataille, K., Lund, F.: Nonlinear waves in elastic media. Physica D 6(1), 95\u2013104 (1982)'), ('REFSTR', "{u'bibunstructured': u'Bataille, K., Lund, F.: Nonlinear waves in elastic media. Physica D 6(1), 95\\u2013104 (1982)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Bataille', u'initials': u'K'}, {u'familyname': u'Lund', u'initials': u'F'}], u'issueid': u'1', u'journaltitle': u'Physica D', u'volumeid': u'6', u'firstpage': u'95', u'lastpage': u'104', u'year': u'1982', u'articletitle': {u'#text': u'Nonlinear waves in elastic media', u'@language': u'En'}, u'occurrence': [{u'handle': u'680897', u'@type': u'AMSID'}, {u'handle': u'10.1016/0167-2789(82)90007-0', u'@type': u'DOI'}]}, u'citationnumber': u'5.', u'@id': u'CR5'}")],
[('AUTHOR_FIRST_NAME', u'MM'), ('AUTHOR_LAST_NAME', u'Carroll'), ('TITLE', u'Some'), ('TITLE', u'results'), ('TITLE', u'on'), ('TITLE', u'finite'), ('TITLE', u'amplitude'), ('TITLE', u'elastic'), ('TITLE', u'waves'), ('JOURNAL', u'Acta'), ('JOURNAL', u'Mech.'), ('VOLUME', u'3'), ('YEAR', u'1967'), ('PAGE', u'167'), ('REFPLAINTEXT', u'Carroll, M.M.: Some results on finite amplitude elastic waves. Acta Mech. 3, 167\u2013181 (1967)'), ('REFSTR', "{u'bibunstructured': u'Carroll, M.M.: Some results on finite amplitude elastic waves. Acta Mech. 3, 167\\u2013181 (1967)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Carroll', u'initials': u'MM'}, u'occurrence': {u'handle': u'10.1007/BF01453713', u'@type': u'DOI'}, u'journaltitle': u'Acta Mech.', u'volumeid': u'3', u'firstpage': u'167', u'lastpage': u'181', u'year': u'1967', u'articletitle': {u'#text': u'Some results on finite amplitude elastic waves', u'@language': u'En'}}, u'citationnumber': u'6.', u'@id': u'CR6'}")],
[('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Deliktas'), ('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Teymur'), ('TITLE', u'Surface'), ('TITLE', u'shear'), ('TITLE', u'horizontal'), ('TITLE', u'waves'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'double-'), ('TITLE', u'layered'), ('TITLE', u'nonlinear'), ('TITLE', u'elastic'), ('TITLE', u'half'), ('TITLE', u'space'), ('JOURNAL', u'IMA'), ('JOURNAL', u'J.'), ('JOURNAL', u'Appl.'), ('JOURNAL', u'Math.'), ('VOLUME', u'83'), ('ISSUE', u'3'), ('YEAR', u'2018'), ('PAGE', u'471'), ('DOI', u'10.1093/imamat/hxy009'), ('REFPLAINTEXT', u'Deliktas, E., Teymur, M.: Surface shear horizontal waves in a double-layered nonlinear elastic half space. IMA J. Appl. Math. 83(3), 471\u2013495 (2018)'), ('REFSTR', "{u'bibunstructured': u'Deliktas, E., Teymur, M.: Surface shear horizontal waves in a double-layered nonlinear elastic half space. IMA J. Appl. Math. 83(3), 471\\u2013495 (2018)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Deliktas', u'initials': u'E'}, {u'familyname': u'Teymur', u'initials': u'M'}], u'issueid': u'3', u'journaltitle': u'IMA J. Appl. Math.', u'volumeid': u'83', u'firstpage': u'471', u'lastpage': u'495', u'year': u'2018', u'articletitle': {u'#text': u'Surface shear horizontal waves in a double-layered nonlinear elastic half space', u'@language': u'En'}, u'occurrence': [{u'handle': u'3810216', u'@type': u'AMSID'}, {u'handle': u'10.1093/imamat/hxy009', u'@type': u'DOI'}]}, u'citationnumber': u'7.', u'@id': u'CR7'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Destrade'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Saccomandi'), ('TITLE', u'Finite'), ('TITLE', u'amplitude'), ('TITLE', u'elastic'), ('TITLE', u'waves'), ('TITLE', u'propagating'), ('TITLE', u'in'), ('TITLE', u'compressible'), ('TITLE', u'solids'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'E'), ('VOLUME', u'72'), ('YEAR', u'2005'), ('PAGE', u'016620'), ('DOI', u'10.1103/PhysRevE.72.016620'), ('REFPLAINTEXT', u'Destrade, M., Saccomandi, G.: Finite amplitude elastic waves propagating in compressible solids. Phys. Rev. E 72, 016620 (2005)'), ('REFSTR', "{u'bibunstructured': u'Destrade, M., Saccomandi, G.: Finite amplitude elastic waves propagating in compressible solids. Phys. Rev. E 72, 016620 (2005)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Destrade', u'initials': u'M'}, {u'familyname': u'Saccomandi', u'initials': u'G'}], u'occurrence': [{u'handle': u'2178391', u'@type': u'AMSID'}, {u'handle': u'10.1103/PhysRevE.72.016620', u'@type': u'DOI'}], u'journaltitle': u'Phys. Rev. E', u'volumeid': u'72', u'firstpage': u'016620', u'year': u'2005', u'articletitle': {u'#text': u'Finite amplitude elastic waves propagating in compressible solids', u'@language': u'En'}}, u'citationnumber': u'8.', u'@id': u'CR8'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Destrade'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Saccomandi'), ('TITLE', u'Solitary'), ('TITLE', u'and'), ('TITLE', u'compactlike'), ('TITLE', u'shear'), ('TITLE', u'waves'), ('TITLE', u'in'), ('TITLE', u'the'), ('TITLE', u'bulk'), ('TITLE', u'of'), ('TITLE', u'solids'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'E'), ('VOLUME', u'73'), ('YEAR', u'2006'), ('PAGE', u'065604(R)'), ('DOI', u'10.1103/PhysRevE.73.065604'), ('REFPLAINTEXT', u'Destrade, M., Saccomandi, G.: Solitary and compactlike shear waves in the bulk of solids. Phys. Rev. E 73, 065604(R) (2006)'), ('REFSTR', "{u'bibunstructured': u'Destrade, M., Saccomandi, G.: Solitary and compactlike shear waves in the bulk of solids. Phys. Rev. E 73, 065604(R) (2006)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Destrade', u'initials': u'M'}, {u'familyname': u'Saccomandi', u'initials': u'G'}], u'occurrence': [{u'handle': u'2276285', u'@type': u'AMSID'}, {u'handle': u'10.1103/PhysRevE.73.065604', u'@type': u'DOI'}], u'journaltitle': u'Phys. Rev. E', u'volumeid': u'73', u'firstpage': u'065604(R)', u'year': u'2006', u'articletitle': {u'#text': u'Solitary and compactlike shear waves in the bulk of solids', u'@language': u'En'}}, u'citationnumber': u'9.', u'@id': u'CR9'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Destrade'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Saccomandi'), ('TITLE', u'Nonlinear'), ('TITLE', u'transverse'), ('TITLE', u'waves'), ('TITLE', u'in'), ('TITLE', u'deformed'), ('TITLE', u'dispersive'), ('TITLE', u'solids'), ('JOURNAL', u'Wave'), ('JOURNAL', u'Motion'), ('VOLUME', u'45'), ('YEAR', u'2008'), ('PAGE', u'325'), ('DOI', u'10.1016/j.wavemoti.2007.07.002'), ('REFPLAINTEXT', u'Destrade, M., Saccomandi, G.: Nonlinear transverse waves in deformed dispersive solids. Wave Motion 45, 325\u2013336 (2008)'), ('REFSTR', "{u'bibunstructured': u'Destrade, M., Saccomandi, G.: Nonlinear transverse waves in deformed dispersive solids. Wave Motion 45, 325\\u2013336 (2008)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Destrade', u'initials': u'M'}, {u'familyname': u'Saccomandi', u'initials': u'G'}], u'occurrence': [{u'handle': u'2450051', u'@type': u'AMSID'}, {u'handle': u'10.1016/j.wavemoti.2007.07.002', u'@type': u'DOI'}], u'journaltitle': u'Wave Motion', u'volumeid': u'45', u'firstpage': u'325', u'lastpage': u'336', u'year': u'2008', u'articletitle': {u'#text': u'Nonlinear transverse waves in deformed dispersive solids', u'@language': u'En'}}, u'citationnumber': u'10.', u'@id': u'CR10'}")],
[('AUTHOR_FIRST_NAME', u'RK'), ('AUTHOR_LAST_NAME', u'Dodd'), ('AUTHOR_FIRST_NAME', u'JC'), ('AUTHOR_LAST_NAME', u'Eilbeck'), ('AUTHOR_FIRST_NAME', u'JD'), ('AUTHOR_LAST_NAME', u'Gibbon'), ('AUTHOR_FIRST_NAME', u'HC'), ('AUTHOR_LAST_NAME', u'Morris'), ('YEAR', u'1982'), ('PUBLISHER', u'Solitons'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Nonlinear'), ('PUBLISHER', u'Wave'), ('PUBLISHER', u'Equations'), ('REFPLAINTEXT', u'Dodd, R.K., Eilbeck, J.C., Gibbon, J.D., Morris, H.C.: Solitons and Nonlinear Wave Equations. Academic Press, London (1982)'), ('REFSTR', "{u'bibunstructured': u'Dodd, R.K., Eilbeck, J.C., Gibbon, J.D., Morris, H.C.: Solitons and Nonlinear Wave Equations. Academic Press, London (1982)', u'citationnumber': u'11.', u'@id': u'CR11', u'bibbook': {u'bibauthorname': [{u'familyname': u'Dodd', u'initials': u'RK'}, {u'familyname': u'Eilbeck', u'initials': u'JC'}, {u'familyname': u'Gibbon', u'initials': u'JD'}, {u'familyname': u'Morris', u'initials': u'HC'}], u'publisherlocation': u'London', u'occurrence': {u'handle': u'0496.35001', u'@type': u'ZLBID'}, u'booktitle': u'Solitons and Nonlinear Wave Equations', u'year': u'1982', u'publishername': u'Academic Press'}}")],
[('AUTHOR_FIRST_NAME', u'AC'), ('AUTHOR_LAST_NAME', u'Eringen'), ('AUTHOR_FIRST_NAME', u'ES'), ('AUTHOR_LAST_NAME', u'Suhubi'), ('YEAR', u'1974'), ('PUBLISHER', u'Elastodynamics'), ('REFPLAINTEXT', u'Eringen, A.C., Suhubi, E.S.: Elastodynamics, vol. I. Academic Press, New York (1974)'), ('REFSTR', "{u'bibunstructured': u'Eringen, A.C., Suhubi, E.S.: Elastodynamics, vol. I. Academic Press, New York (1974)', u'citationnumber': u'12.', u'@id': u'CR12', u'bibbook': {u'bibauthorname': [{u'familyname': u'Eringen', u'initials': u'AC'}, {u'familyname': u'Suhubi', u'initials': u'ES'}], u'publisherlocation': u'New York', u'occurrence': {u'handle': u'0291.73018', u'@type': u'ZLBID'}, u'booktitle': u'Elastodynamics', u'year': u'1974', u'numberinseries': u'I', u'publishername': u'Academic Press'}}")],
[('AUTHOR_FIRST_NAME', u'AC'), ('AUTHOR_LAST_NAME', u'Eringen'), ('AUTHOR_FIRST_NAME', u'ES'), ('AUTHOR_LAST_NAME', u'Suhubi'), ('YEAR', u'1975'), ('PUBLISHER', u'Elastodynamics'), ('REFPLAINTEXT', u'Eringen, A.C., Suhubi, E.S.: Elastodynamics, vol. II. Academic Press, New York (1975)'), ('REFSTR', "{u'bibunstructured': u'Eringen, A.C., Suhubi, E.S.: Elastodynamics, vol. II. Academic Press, New York (1975)', u'citationnumber': u'13.', u'@id': u'CR13', u'bibbook': {u'bibauthorname': [{u'familyname': u'Eringen', u'initials': u'AC'}, {u'familyname': u'Suhubi', u'initials': u'ES'}], u'publisherlocation': u'New York', u'occurrence': {u'handle': u'0344.73036', u'@type': u'ZLBID'}, u'booktitle': u'Elastodynamics', u'year': u'1975', u'numberinseries': u'II', u'publishername': u'Academic Press'}}")],
[('AUTHOR_FIRST_NAME', u'WM'), ('AUTHOR_LAST_NAME', u'Ewing'), ('AUTHOR_FIRST_NAME', u'WS'), ('AUTHOR_LAST_NAME', u'Jardetsky'), ('YEAR', u'1957'), ('PUBLISHER', u'Elastic'), ('PUBLISHER', u'Waves'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Layered'), ('PUBLISHER', u'Media'), ('REFPLAINTEXT', u'Ewing, W.M., Jardetsky, W.S.: Elastic Waves in Layered Media. McGraw-Hill, New York (1957)'), ('REFSTR', "{u'bibunstructured': u'Ewing, W.M., Jardetsky, W.S.: Elastic Waves in Layered Media. McGraw-Hill, New York (1957)', u'citationnumber': u'14.', u'@id': u'CR14', u'bibbook': {u'bibauthorname': [{u'familyname': u'Ewing', u'initials': u'WM'}, {u'familyname': u'Jardetsky', u'initials': u'WS'}], u'publisherlocation': u'New York', u'occurrence': {u'handle': u'10.1063/1.3060203', u'@type': u'DOI'}, u'booktitle': u'Elastic Waves in Layered Media', u'year': u'1957', u'publishername': u'McGraw-Hill'}}")],
[('AUTHOR_FIRST_NAME', u'GW'), ('AUTHOR_LAST_NAME', u'Farnell'), ('YEAR', u'1978'), ('PAGE', u'13'), ('PUBLISHER', u'Acoustic'), ('PUBLISHER', u'Surface'), ('PUBLISHER', u'Waves'), ('REFPLAINTEXT', u'Farnell, G.W.: Types and properties of surface waves. In: Oliner, A.A. (ed.) Acoustic Surface Waves, pp. 13\u201360. Springer, Berlin (1978)'), ('REFSTR', "{u'bibunstructured': u'Farnell, G.W.: Types and properties of surface waves. In: Oliner, A.A. (ed.) Acoustic Surface Waves, pp. 13\\u201360. Springer, Berlin (1978)', u'bibchapter': {u'eds': {u'publisherlocation': u'Berlin', u'occurrence': {u'handle': u'10.1007/3-540-08575-0_9', u'@type': u'DOI'}, u'booktitle': u'Acoustic Surface Waves', u'firstpage': u'13', u'lastpage': u'60', u'publishername': u'Springer'}, u'bibauthorname': {u'familyname': u'Farnell', u'initials': u'GW'}, u'chaptertitle': {u'#text': u'Types and properties of surface waves', u'@language': u'En'}, u'bibeditorname': {u'familyname': u'Oliner', u'initials': u'AA'}, u'year': u'1978'}, u'citationnumber': u'15.', u'@id': u'CR15'}")],
[('AUTHOR_FIRST_NAME', u'Y'), ('AUTHOR_LAST_NAME', u'Fu'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'propagation'), ('TITLE', u'of'), ('TITLE', u'nonlinear'), ('TITLE', u'travelling'), ('TITLE', u'waves'), ('TITLE', u'in'), ('TITLE', u'an'), ('TITLE', u'incompressible'), ('TITLE', u'elastic'), ('TITLE', u'plate'), ('JOURNAL', u'Wave'), ('JOURNAL', u'Motion'), ('VOLUME', u'19'), ('ISSUE', u'3'), ('YEAR', u'1994'), ('PAGE', u'271'), ('DOI', u'10.1016/0165-2125(94)90058-2'), ('REFPLAINTEXT', u'Fu, Y.: On the propagation of nonlinear travelling waves in an incompressible elastic plate. Wave Motion 19(3), 271\u2013292 (1994)'), ('REFSTR', "{u'bibunstructured': u'Fu, Y.: On the propagation of nonlinear travelling waves in an incompressible elastic plate. Wave Motion 19(3), 271\\u2013292 (1994)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Fu', u'initials': u'Y'}, u'issueid': u'3', u'journaltitle': u'Wave Motion', u'volumeid': u'19', u'firstpage': u'271', u'lastpage': u'292', u'year': u'1994', u'articletitle': {u'#text': u'On the propagation of nonlinear travelling waves in an incompressible elastic plate', u'@language': u'En'}, u'occurrence': [{u'handle': u'1276942', u'@type': u'AMSID'}, {u'handle': u'10.1016/0165-2125(94)90058-2', u'@type': u'DOI'}]}, u'citationnumber': u'16.', u'@id': u'CR16'}")],
[('AUTHOR_FIRST_NAME', u'YB'), ('AUTHOR_LAST_NAME', u'Fu'), ('AUTHOR_FIRST_NAME', u'RW'), ('AUTHOR_LAST_NAME', u'Ogden'), ('TITLE', u'Nonlinear'), ('TITLE', u'stability'), ('TITLE', u'analysis'), ('TITLE', u'of'), ('TITLE', u'pre-'), ('TITLE', u'stressed'), ('TITLE', u'elastic'), ('TITLE', u'bodies'), ('JOURNAL', u'Contin.'), ('JOURNAL', u'Mech.'), ('JOURNAL', u'Thermodyn.'), ('VOLUME', u'11'), ('ISSUE', u'3'), ('YEAR', u'1999'), ('PAGE', u'141'), ('DOI', u'10.1007/s001610050108'), ('REFPLAINTEXT', u'Fu, Y.B., Ogden, R.W.: Nonlinear stability analysis of pre-stressed elastic bodies. Contin. Mech. Thermodyn. 11(3), 141\u2013172 (1999)'), ('REFSTR', "{u'bibunstructured': u'Fu, Y.B., Ogden, R.W.: Nonlinear stability analysis of pre-stressed elastic bodies. Contin. Mech. Thermodyn. 11(3), 141\\u2013172 (1999)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Fu', u'initials': u'YB'}, {u'familyname': u'Ogden', u'initials': u'RW'}], u'issueid': u'3', u'journaltitle': u'Contin. Mech. Thermodyn.', u'volumeid': u'11', u'firstpage': u'141', u'lastpage': u'172', u'year': u'1999', u'articletitle': {u'#text': u'Nonlinear stability analysis of pre-stressed elastic bodies', u'@language': u'En'}, u'occurrence': [{u'handle': u'1701411', u'@type': u'AMSID'}, {u'handle': u'10.1007/s001610050108', u'@type': u'DOI'}]}, u'citationnumber': u'17.', u'@id': u'CR17'}")],
[('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Jeffrey'), ('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Kawahara'), ('YEAR', u'1982'), ('PUBLISHER', u'Asymptotic'), ('PUBLISHER', u'Methods'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Nonlinear'), ('PUBLISHER', u'Wave'), ('PUBLISHER', u'Theory'), ('REFPLAINTEXT', u'Jeffrey, A., Kawahara, T.: Asymptotic Methods in Nonlinear Wave Theory. Pitman Advenced Publishing, Boston (1982)'), ('REFSTR', "{u'bibunstructured': u'Jeffrey, A., Kawahara, T.: Asymptotic Methods in Nonlinear Wave Theory. Pitman Advenced Publishing, Boston (1982)', u'citationnumber': u'18.', u'@id': u'CR18', u'bibbook': {u'bibauthorname': [{u'familyname': u'Jeffrey', u'initials': u'A'}, {u'familyname': u'Kawahara', u'initials': u'T'}], u'publisherlocation': u'Boston', u'occurrence': {u'handle': u'0473.35002', u'@type': u'ZLBID'}, u'booktitle': u'Asymptotic Methods in Nonlinear Wave Theory', u'year': u'1982', u'publishername': u'Pitman Advenced Publishing'}}")],
[('AUTHOR_FIRST_NAME', u'T'), ('AUTHOR_LAST_NAME', u'Kakutani'), ('AUTHOR_FIRST_NAME', u'K'), ('AUTHOR_LAST_NAME', u'Michihiro'), ('TITLE', u'Marginal'), ('TITLE', u'state'), ('TITLE', u'of'), ('TITLE', u'modulational'), ('TITLE', u'instability-'), ('TITLE', u'note'), ('TITLE', u'on'), ('TITLE', u'BenjaminFeir'), ('TITLE', u'instability'), ('JOURNAL', u'J.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Soc.'), ('JOURNAL', u'Jpn.'), ('VOLUME', u'52'), ('ISSUE', u'12'), ('YEAR', u'1983'), ('PAGE', u'4129'), ('REFPLAINTEXT', u'Kakutani, T., Michihiro, K.: Marginal state of modulational instability-note on Benjamin\u2013Feir instability. J. Phys. Soc. Jpn. 52(12), 4129\u20134137 (1983)'), ('REFSTR', "{u'bibunstructured': u'Kakutani, T., Michihiro, K.: Marginal state of modulational instability-note on Benjamin\\u2013Feir instability. J. Phys. Soc. Jpn. 52(12), 4129\\u20134137 (1983)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Kakutani', u'initials': u'T'}, {u'familyname': u'Michihiro', u'initials': u'K'}], u'issueid': u'12', u'journaltitle': u'J. Phys. Soc. Jpn.', u'volumeid': u'52', u'firstpage': u'4129', u'lastpage': u'4137', u'year': u'1983', u'articletitle': {u'#text': u'Marginal state of modulational instability-note on Benjamin\\u2013Feir instability', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1143/JPSJ.52.4129', u'@type': u'DOI'}}, u'citationnumber': u'19.', u'@id': u'CR19'}")],
[('AUTHOR_FIRST_NAME', u'P'), ('AUTHOR_LAST_NAME', u'Kayestha'), ('AUTHOR_FIRST_NAME', u'ER'), ('AUTHOR_LAST_NAME', u'Ferreira'), ('AUTHOR_FIRST_NAME', u'AC'), ('AUTHOR_LAST_NAME', u'Wijeyewickrema'), ('TITLE', u'Finite-'), ('TITLE', u'amplitude'), ('TITLE', u'shear'), ('TITLE', u'horizontal'), ('TITLE', u'waves'), ('TITLE', u'propagating'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'pre-'), ('TITLE', u'stressed'), ('TITLE', u'layer'), ('TITLE', u'between'), ('TITLE', u'two'), ('TITLE', u'half-'), ('TITLE', u'spaces'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Solids'), ('JOURNAL', u'Struct.'), ('VOLUME', u'50'), ('YEAR', u'2013'), ('PAGE', u'3586'), ('REFPLAINTEXT', u'Kayestha, P., Ferreira, E.R., Wijeyewickrema, A.C.: Finite-amplitude shear horizontal waves propagating in a pre- stressed layer between two half-spaces. Int. J. Solids Struct. 50, 3586\u20133596 (2013)'), ('REFSTR', "{u'bibunstructured': u'Kayestha, P., Ferreira, E.R., Wijeyewickrema, A.C.: Finite-amplitude shear horizontal waves propagating in a pre- stressed layer between two half-spaces. Int. J. Solids Struct. 50, 3586\\u20133596 (2013)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Kayestha', u'initials': u'P'}, {u'familyname': u'Ferreira', u'initials': u'ER'}, {u'familyname': u'Wijeyewickrema', u'initials': u'AC'}], u'occurrence': {u'handle': u'10.1016/j.ijsolstr.2013.07.002', u'@type': u'DOI'}, u'journaltitle': u'Int. J. Solids Struct.', u'volumeid': u'50', u'firstpage': u'3586', u'lastpage': u'3596', u'year': u'2013', u'articletitle': {u'#text': u'Finite-amplitude shear horizontal waves propagating in a pre- stressed layer between two half-spaces', u'@language': u'En'}}, u'citationnumber': u'20.', u'@id': u'CR20'}")],
[('AUTHOR_FIRST_NAME', u'GA'), ('AUTHOR_LAST_NAME', u'Maugin'), ('AUTHOR_FIRST_NAME', u'H'), ('AUTHOR_LAST_NAME', u'Hadouaj'), ('TITLE', u'Solitary'), ('TITLE', u'surface'), ('TITLE', u'transverse'), ('TITLE', u'waves'), ('TITLE', u'on'), ('TITLE', u'an'), ('TITLE', u'elastic'), ('TITLE', u'substrate'), ('TITLE', u'coated'), ('TITLE', u'with'), ('TITLE', u'a'), ('TITLE', u'thin'), ('TITLE', u'film'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rev.'), ('JOURNAL', u'B'), ('VOLUME', u'44'), ('YEAR', u'1991'), ('PAGE', u'1266'), ('REFPLAINTEXT', u'Maugin, G.A., Hadouaj, H.: Solitary surface transverse waves on an elastic substrate coated with a thin film. Phys. Rev. B 44, 1266\u20131280 (1991)'), ('REFSTR', "{u'bibunstructured': u'Maugin, G.A., Hadouaj, H.: Solitary surface transverse waves on an elastic substrate coated with a thin film. Phys. Rev. B 44, 1266\\u20131280 (1991)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Maugin', u'initials': u'GA'}, {u'familyname': u'Hadouaj', u'initials': u'H'}], u'occurrence': {u'handle': u'10.1103/PhysRevB.44.1266', u'@type': u'DOI'}, u'journaltitle': u'Phys. Rev. B', u'volumeid': u'44', u'firstpage': u'1266', u'lastpage': u'1280', u'year': u'1991', u'articletitle': {u'#text': u'Solitary surface transverse waves on an elastic substrate coated with a thin film', u'@language': u'En'}}, u'citationnumber': u'21.', u'@id': u'CR21'}")],
[('AUTHOR_FIRST_NAME', u'AP'), ('AUTHOR_LAST_NAME', u'Mayer'), ('TITLE', u'Surface'), ('TITLE', u'acoustic'), ('TITLE', u'waves'), ('TITLE', u'in'), ('TITLE', u'nonlinear'), ('TITLE', u'elastic'), ('TITLE', u'media'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'Rep.'), ('VOLUME', u'256'), ('YEAR', u'1995'), ('PAGE', u'237'), ('REFPLAINTEXT', u'Mayer, A.P.: Surface acoustic waves in nonlinear elastic media. Phys. Rep. 256, 237\u2013366 (1995)'), ('REFSTR', "{u'bibunstructured': u'Mayer, A.P.: Surface acoustic waves in nonlinear elastic media. Phys. Rep. 256, 237\\u2013366 (1995)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Mayer', u'initials': u'AP'}, u'occurrence': {u'handle': u'10.1016/0370-1573(94)00088-K', u'@type': u'DOI'}, u'journaltitle': u'Phys. Rep.', u'volumeid': u'256', u'firstpage': u'237', u'lastpage': u'366', u'year': u'1995', u'articletitle': {u'#text': u'Surface acoustic waves in nonlinear elastic media', u'@language': u'En'}}, u'citationnumber': u'22.', u'@id': u'CR22'}")],
[('AUTHOR_FIRST_NAME', u'J'), ('AUTHOR_LAST_NAME', u'Miklowitz'), ('YEAR', u'1978'), ('PUBLISHER', u'The'), ('PUBLISHER', u'Theory'), ('PUBLISHER', u'of'), ('PUBLISHER', u'Elastic'), ('PUBLISHER', u'Waves'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Waveguides'), ('REFPLAINTEXT', u'Miklowitz, J.: The Theory of Elastic Waves and Waveguides. North Holland Publishing Co., Amsterdam (1978)'), ('REFSTR', "{u'bibunstructured': u'Miklowitz, J.: The Theory of Elastic Waves and Waveguides. North Holland Publishing Co., Amsterdam (1978)', u'citationnumber': u'23.', u'@id': u'CR23', u'bibbook': {u'publisherlocation': u'Amsterdam', u'bibauthorname': {u'familyname': u'Miklowitz', u'initials': u'J'}, u'publishername': u'North Holland Publishing Co.', u'booktitle': u'The Theory of Elastic Waves and Waveguides', u'year': u'1978'}}")],
[('AUTHOR_FIRST_NAME', u'AH'), ('AUTHOR_LAST_NAME', u'Nayfeh'), ('TITLE', u'Third-'), ('TITLE', u'harmonic'), ('TITLE', u'resonance'), ('TITLE', u'in'), ('TITLE', u'the'), ('TITLE', u'interaction'), ('TITLE', u'of'), ('TITLE', u'capillary'), ('TITLE', u'and'), ('TITLE', u'gravity'), ('TITLE', u'waves'), ('JOURNAL', u'J.'), ('JOURNAL', u'Fluid'), ('JOURNAL', u'Mech.'), ('VOLUME', u'48'), ('ISSUE', u'2'), ('YEAR', u'1971'), ('PAGE', u'385'), ('REFPLAINTEXT', u'Nayfeh, A.H.: Third-harmonic resonance in the interaction of capillary and gravity waves. J. Fluid Mech. 48(2), 385\u2013395 (1971)'), ('REFSTR', "{u'bibunstructured': u'Nayfeh, A.H.: Third-harmonic resonance in the interaction of capillary and gravity waves. J. Fluid Mech. 48(2), 385\\u2013395 (1971)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Nayfeh', u'initials': u'AH'}, u'issueid': u'2', u'journaltitle': u'J. Fluid Mech.', u'volumeid': u'48', u'firstpage': u'385', u'lastpage': u'395', u'year': u'1971', u'articletitle': {u'#text': u'Third-harmonic resonance in the interaction of capillary and gravity waves', u'@language': u'En'}, u'occurrence': {u'handle': u'10.1017/S0022112071001630', u'@type': u'DOI'}}, u'citationnumber': u'24.', u'@id': u'CR24'}")],
[('AUTHOR_FIRST_NAME', u'DF'), ('AUTHOR_LAST_NAME', u'Parker'), ('AUTHOR_FIRST_NAME', u'FM'), ('AUTHOR_LAST_NAME', u'Talbot'), ('TITLE', u'Analysis'), ('TITLE', u'and'), ('TITLE', u'computation'), ('TITLE', u'for'), ('TITLE', u'nonlinear'), ('TITLE', u'elastic'), ('TITLE', u'surface'), ('TITLE', u'waves'), ('TITLE', u'of'), ('TITLE', u'permanent'), ('TITLE', u'form'), ('JOURNAL', u'J.'), ('JOURNAL', u'Elast.'), ('VOLUME', u'15'), ('ISSUE', u'4'), ('YEAR', u'1985'), ('PAGE', u'389'), ('DOI', u'10.1007/BF00042530'), ('REFPLAINTEXT', u'Parker, D.F., Talbot, F.M.: Analysis and computation for nonlinear elastic surface waves of permanent form. J. Elast. 15(4), 389\u2013426 (1985)'), ('REFSTR', "{u'bibunstructured': u'Parker, D.F., Talbot, F.M.: Analysis and computation for nonlinear elastic surface waves of permanent form. J. Elast. 15(4), 389\\u2013426 (1985)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Parker', u'initials': u'DF'}, {u'familyname': u'Talbot', u'initials': u'FM'}], u'issueid': u'4', u'journaltitle': u'J. Elast.', u'volumeid': u'15', u'firstpage': u'389', u'lastpage': u'426', u'year': u'1985', u'articletitle': {u'#text': u'Analysis and computation for nonlinear elastic surface waves of permanent form', u'@language': u'En'}, u'occurrence': [{u'handle': u'817377', u'@type': u'AMSID'}, {u'handle': u'10.1007/BF00042530', u'@type': u'DOI'}]}, u'citationnumber': u'25.', u'@id': u'CR25'}")],
[('AUTHOR_FIRST_NAME', u'DH'), ('AUTHOR_LAST_NAME', u'Peregrine'), ('TITLE', u'Water'), ('TITLE', u'waves,'), ('TITLE', u'non-'), ('TITLE', u'linear'), ('TITLE', u'Schrdinger'), ('TITLE', u'equations'), ('TITLE', u'and'), ('TITLE', u'their'), ('TITLE', u'solutions'), ('JOURNAL', u'J.'), ('JOURNAL', u'Aust.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Soc.'), ('JOURNAL', u'Ser.'), ('JOURNAL', u'B'), ('VOLUME', u'25'), ('YEAR', u'1983'), ('PAGE', u'16'), ('REFPLAINTEXT', u'Peregrine, D.H.: Water waves, non-linear Schr\xf6dinger equations and their solutions. J. Aust. Math. Soc. Ser. B 25, 16\u201343 (1983)'), ('REFSTR', "{u'bibunstructured': u'Peregrine, D.H.: Water waves, non-linear Schr\\xf6dinger equations and their solutions. J. Aust. Math. Soc. Ser. B 25, 16\\u201343 (1983)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Peregrine', u'initials': u'DH'}, u'occurrence': {u'handle': u'10.1017/S0334270000003891', u'@type': u'DOI'}, u'journaltitle': u'J. Aust. Math. Soc. Ser. B', u'volumeid': u'25', u'firstpage': u'16', u'lastpage': u'43', u'year': u'1983', u'articletitle': {u'#text': u'Water waves, non-linear Schr\\xf6dinger equations and their solutions', u'@language': u'En'}}, u'citationnumber': u'26.', u'@id': u'CR26'}")],
[('AUTHOR_FIRST_NAME', u'AV'), ('AUTHOR_LAST_NAME', u'Porubov'), ('AUTHOR_FIRST_NAME', u'AM'), ('AUTHOR_LAST_NAME', u'Samsonov'), ('TITLE', u'Long'), ('TITLE', u'nonlinear'), ('TITLE', u'strain'), ('TITLE', u'waves'), ('TITLE', u'in'), ('TITLE', u'layered'), ('TITLE', u'elastic'), ('TITLE', u'half'), ('TITLE', u'space'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Eng.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'30'), ('ISSUE', u'6'), ('YEAR', u'1995'), ('PAGE', u'861'), ('REFPLAINTEXT', u'Porubov, A.V., Samsonov, A.M.: Long nonlinear strain waves in layered elastic half space. Int. J. Eng. Sci. 30(6), 861\u2013877 (1995)'), ('REFSTR', "{u'bibunstructured': u'Porubov, A.V., Samsonov, A.M.: Long nonlinear strain waves in layered elastic half space. Int. J. Eng. Sci. 30(6), 861\\u2013877 (1995)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Porubov', u'initials': u'AV'}, {u'familyname': u'Samsonov', u'initials': u'AM'}], u'issueid': u'6', u'journaltitle': u'Int. J. Eng. Sci.', u'volumeid': u'30', u'firstpage': u'861', u'lastpage': u'877', u'year': u'1995', u'articletitle': {u'#text': u'Long nonlinear strain waves in layered elastic half space', u'@language': u'En'}, u'occurrence': {u'handle': u'0947.74026', u'@type': u'ZLBID'}}, u'citationnumber': u'27.', u'@id': u'CR27'}")],
[('AUTHOR_FIRST_NAME', u'AV'), ('AUTHOR_LAST_NAME', u'Porubov'), ('AUTHOR_FIRST_NAME', u'DF'), ('AUTHOR_LAST_NAME', u'Parker'), ('TITLE', u'Some'), ('TITLE', u'general'), ('TITLE', u'periodic'), ('TITLE', u'solutions'), ('TITLE', u'to'), ('TITLE', u'coupled'), ('TITLE', u'nonlinear'), ('TITLE', u'Schrdinger'), ('TITLE', u'equations'), ('JOURNAL', u'Wave'), ('JOURNAL', u'Motion'), ('VOLUME', u'29'), ('ISSUE', u'2'), ('YEAR', u'1999'), ('PAGE', u'97'), ('DOI', u'10.1016/S0165-2125(98)00033-X'), ('REFPLAINTEXT', u'Porubov, A.V., Parker, D.F.: Some general periodic solutions to coupled nonlinear Schr\xf6dinger equations. Wave Motion 29(2), 97\u2013109 (1999)'), ('REFSTR', "{u'bibunstructured': u'Porubov, A.V., Parker, D.F.: Some general periodic solutions to coupled nonlinear Schr\\xf6dinger equations. Wave Motion 29(2), 97\\u2013109 (1999)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Porubov', u'initials': u'AV'}, {u'familyname': u'Parker', u'initials': u'DF'}], u'issueid': u'2', u'journaltitle': u'Wave Motion', u'volumeid': u'29', u'firstpage': u'97', u'lastpage': u'109', u'year': u'1999', u'articletitle': {u'#text': u'Some general periodic solutions to coupled nonlinear Schr\\xf6dinger equations', u'@language': u'En'}, u'occurrence': [{u'handle': u'1659447', u'@type': u'AMSID'}, {u'handle': u'10.1016/S0165-2125(98)00033-X', u'@type': u'DOI'}]}, u'citationnumber': u'28.', u'@id': u'CR28'}")],
[('AUTHOR_FIRST_NAME', u'C'), ('AUTHOR_LAST_NAME', u'Rogers'), ('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Saccomandi'), ('AUTHOR_FIRST_NAME', u'L'), ('AUTHOR_LAST_NAME', u'Vergori'), ('TITLE', u'Carroll-'), ('TITLE', u'Type'), ('TITLE', u'deformations'), ('TITLE', u'in'), ('TITLE', u'nonlinear'), ('TITLE', u'elastodynamics'), ('JOURNAL', u'J.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'A'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Theor.'), ('VOLUME', u'47'), ('YEAR', u'2014'), ('PAGE', u'205204'), ('DOI', u'10.1088/1751-8113/47/20/205204'), ('REFPLAINTEXT', u'Rogers, C., Saccomandi, G., Vergori, L.: Carroll- Type deformations in nonlinear elastodynamics. J. Phys. A Math. Theor. 47, 205204 (2014)'), ('REFSTR', "{u'bibunstructured': u'Rogers, C., Saccomandi, G., Vergori, L.: Carroll- Type deformations in nonlinear elastodynamics. J. Phys. A Math. Theor. 47, 205204 (2014)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Rogers', u'initials': u'C'}, {u'familyname': u'Saccomandi', u'initials': u'G'}, {u'familyname': u'Vergori', u'initials': u'L'}], u'occurrence': [{u'handle': u'3205918', u'@type': u'AMSID'}, {u'handle': u'10.1088/1751-8113/47/20/205204', u'@type': u'DOI'}], u'journaltitle': u'J. Phys. A Math. Theor.', u'volumeid': u'47', u'firstpage': u'205204', u'year': u'2014', u'articletitle': {u'#text': u'Carroll- Type deformations in nonlinear elastodynamics', u'@language': u'En'}}, u'citationnumber': u'29.', u'@id': u'CR29'}")],
[('AUTHOR_FIRST_NAME', u'G'), ('AUTHOR_LAST_NAME', u'Saccomandi'), ('AUTHOR_FIRST_NAME', u'R'), ('AUTHOR_LAST_NAME', u'Vitolo'), ('TITLE', u'On'), ('TITLE', u'the'), ('TITLE', u'mathematical'), ('TITLE', u'and'), ('TITLE', u'geometrical'), ('TITLE', u'structure'), ('TITLE', u'of'), ('TITLE', u'the'), ('TITLE', u'determining'), ('TITLE', u'equations'), ('TITLE', u'for'), ('TITLE', u'shear'), ('TITLE', u'waves'), ('TITLE', u'in'), ('TITLE', u'nonlinear'), ('TITLE', u'isotropic'), ('TITLE', u'incompresible'), ('TITLE', u'elastodynamics'), ('JOURNAL', u'J.'), ('JOURNAL', u'Math.'), ('JOURNAL', u'Phys.'), ('VOLUME', u'55'), ('YEAR', u'2014'), ('PAGE', u'081502'), ('DOI', u'10.1063/1.4891602'), ('REFPLAINTEXT', u'Saccomandi, G., Vitolo, R.: On the mathematical and geometrical structure of the determining equations for shear waves in nonlinear isotropic incompresible elastodynamics. J. Math. Phys. 55, 081502 (2014)'), ('REFSTR', "{u'bibunstructured': u'Saccomandi, G., Vitolo, R.: On the mathematical and geometrical structure of the determining equations for shear waves in nonlinear isotropic incompresible elastodynamics. J. Math. Phys. 55, 081502 (2014)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Saccomandi', u'initials': u'G'}, {u'familyname': u'Vitolo', u'initials': u'R'}], u'occurrence': [{u'handle': u'3390691', u'@type': u'AMSID'}, {u'handle': u'10.1063/1.4891602', u'@type': u'DOI'}], u'journaltitle': u'J. Math. Phys.', u'volumeid': u'55', u'firstpage': u'081502', u'year': u'2014', u'articletitle': {u'#text': u'On the mathematical and geometrical structure of the determining equations for shear waves in nonlinear isotropic incompresible elastodynamics', u'@language': u'En'}}, u'citationnumber': u'30.', u'@id': u'CR30'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Teymur'), ('TITLE', u'Nonlinear'), ('TITLE', u'modulation'), ('TITLE', u'of'), ('TITLE', u'Love'), ('TITLE', u'waves'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'compressible'), ('TITLE', u'hyperelastic'), ('TITLE', u'layered'), ('TITLE', u'half'), ('TITLE', u'space'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Eng.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'26'), ('YEAR', u'1988'), ('PAGE', u'907'), ('DOI', u'10.1016/0020-7225(88)90021-3'), ('REFPLAINTEXT', u'Teymur, M.: Nonlinear modulation of Love waves in a compressible hyperelastic layered half space. Int. J. Eng. Sci. 26, 907\u2013927 (1988)'), ('REFSTR', "{u'bibunstructured': u'Teymur, M.: Nonlinear modulation of Love waves in a compressible hyperelastic layered half space. Int. J. Eng. Sci. 26, 907\\u2013927 (1988)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Teymur', u'initials': u'M'}, u'occurrence': [{u'handle': u'964165', u'@type': u'AMSID'}, {u'handle': u'10.1016/0020-7225(88)90021-3', u'@type': u'DOI'}], u'journaltitle': u'Int. J. Eng. Sci.', u'volumeid': u'26', u'firstpage': u'907', u'lastpage': u'927', u'year': u'1988', u'articletitle': {u'#text': u'Nonlinear modulation of Love waves in a compressible hyperelastic layered half space', u'@language': u'En'}}, u'citationnumber': u'31.', u'@id': u'CR31'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Teymur'), ('YEAR', u'1989'), ('PUBLISHER', u'Nonlinear'), ('PUBLISHER', u'Wave'), ('PUBLISHER', u'Motion'), ('REFPLAINTEXT', u'Teymur, M.: Nonlinear modulation and the fifth-harmonic resonance of Love waves on a neo-Hookean layered half-space. In: Jeffrey, A. (ed.) Nonlinear Wave Motion. Longman, Harlow, Essex (1989)'), ('REFSTR', "{u'bibunstructured': u'Teymur, M.: Nonlinear modulation and the fifth-harmonic resonance of Love waves on a neo-Hookean layered half-space. In: Jeffrey, A. (ed.) Nonlinear Wave Motion. Longman, Harlow, Essex (1989)', u'bibchapter': {u'eds': {u'publisherlocation': u'Harlow, Essex', u'booktitle': u'Nonlinear Wave Motion', u'publishername': u'Longman', u'occurrence': {u'handle': u'0681.73014', u'@type': u'ZLBID'}}, u'bibauthorname': {u'familyname': u'Teymur', u'initials': u'M'}, u'chaptertitle': {u'#text': u'Nonlinear modulation and the fifth-harmonic resonance of Love waves on a neo-Hookean layered half-space', u'@language': u'En'}, u'bibeditorname': {u'familyname': u'Jeffrey', u'initials': u'A'}, u'year': u'1989'}, u'citationnumber': u'32.', u'@id': u'CR32'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Teymur'), ('TITLE', u'Small'), ('TITLE', u'but'), ('TITLE', u'finite'), ('TITLE', u'amplitude'), ('TITLE', u'waves'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'two-'), ('TITLE', u'layered'), ('TITLE', u'incompressible'), ('TITLE', u'elastic'), ('TITLE', u'medium'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Eng.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'34'), ('YEAR', u'1996'), ('PAGE', u'227'), ('DOI', u'10.1016/0020-7225(95)00084-4'), ('REFPLAINTEXT', u'Teymur, M.: Small but finite amplitude waves in a two-layered incompressible elastic medium. Int. J. Eng. Sci. 34, 227\u2013241 (1996)'), ('REFSTR', "{u'bibunstructured': u'Teymur, M.: Small but finite amplitude waves in a two-layered incompressible elastic medium. Int. J. Eng. Sci. 34, 227\\u2013241 (1996)', u'bibarticle': {u'bibauthorname': {u'familyname': u'Teymur', u'initials': u'M'}, u'occurrence': [{u'handle': u'1367605', u'@type': u'AMSID'}, {u'handle': u'10.1016/0020-7225(95)00084-4', u'@type': u'DOI'}], u'journaltitle': u'Int. J. Eng. Sci.', u'volumeid': u'34', u'firstpage': u'227', u'lastpage': u'241', u'year': u'1996', u'articletitle': {u'#text': u'Small but finite amplitude waves in a two-layered incompressible elastic medium', u'@language': u'En'}}, u'citationnumber': u'33.', u'@id': u'CR33'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Teymur'), ('AUTHOR_FIRST_NAME', u'A'), ('AUTHOR_LAST_NAME', u'Demirci'), ('AUTHOR_FIRST_NAME', u'S'), ('AUTHOR_LAST_NAME', u'Ahmetolan'), ('TITLE', u'Propagation'), ('TITLE', u'of'), ('TITLE', u'surface'), ('TITLE', u'SH'), ('TITLE', u'waves'), ('TITLE', u'on'), ('TITLE', u'a'), ('TITLE', u'half'), ('TITLE', u'space'), ('TITLE', u'covered'), ('TITLE', u'by'), ('TITLE', u'a'), ('TITLE', u'nonlinear'), ('TITLE', u'thin'), ('TITLE', u'layer'), ('JOURNAL', u'Int.'), ('JOURNAL', u'J.'), ('JOURNAL', u'Eng.'), ('JOURNAL', u'Sci.'), ('VOLUME', u'85'), ('YEAR', u'2014'), ('PAGE', u'150'), ('REFPLAINTEXT', u'Teymur, M., Demirci, A., Ahmetolan, S.: Propagation of surface SH waves on a half space covered by a nonlinear thin layer. Int. J. Eng. Sci. 85, 150\u2013162 (2014)'), ('REFSTR', "{u'bibunstructured': u'Teymur, M., Demirci, A., Ahmetolan, S.: Propagation of surface SH waves on a half space covered by a nonlinear thin layer. Int. J. Eng. Sci. 85, 150\\u2013162 (2014)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Teymur', u'initials': u'M'}, {u'familyname': u'Demirci', u'initials': u'A'}, {u'familyname': u'Ahmetolan', u'initials': u'S'}], u'occurrence': {u'handle': u'10.1016/j.ijengsci.2014.08.005', u'@type': u'DOI'}, u'journaltitle': u'Int. J. Eng. Sci.', u'volumeid': u'85', u'firstpage': u'150', u'lastpage': u'162', u'year': u'2014', u'articletitle': {u'#text': u'Propagation of surface SH waves on a half space covered by a nonlinear thin layer', u'@language': u'En'}}, u'citationnumber': u'34.', u'@id': u'CR34'}")],
[('AUTHOR_FIRST_NAME', u'M'), ('AUTHOR_LAST_NAME', u'Teymur'), ('AUTHOR_FIRST_NAME', u'H&Idot'), ('AUTHOR_LAST_NAME', u'Var'), ('AUTHOR_FIRST_NAME', u'E'), ('AUTHOR_LAST_NAME', u'Deliktas'), ('YEAR', u'2019'), ('PUBLISHER', u'Dynamical'), ('PUBLISHER', u'Processes'), ('PUBLISHER', u'in'), ('PUBLISHER', u'Generalized'), ('PUBLISHER', u'Continua'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Structures'), ('REFPLAINTEXT', u'Teymur, M., Var, H&Idot., Deliktas, E.: Nonlinear modulation of surface SH waves in a double layered elastic half space. In: Altenbach, H., Belyaev, A., Eremeyev, V., Krivtsov, A., Porubov, A. (eds.) Dynamical Processes in Generalized Continua and Structures. Advanced Structured Materials, vol. 103. Springer, Cham (2019)'), ('REFSTR', "{u'bibunstructured': u'Teymur, M., Var, H&Idot., Deliktas, E.: Nonlinear modulation of surface SH waves in a double layered elastic half space. In: Altenbach, H., Belyaev, A., Eremeyev, V., Krivtsov, A., Porubov, A. (eds.) Dynamical Processes in Generalized Continua and Structures. Advanced Structured Materials, vol. 103. 
Springer, Cham (2019)', u'bibchapter': {u'eds': {u'seriestitle': {u'#text': u'Advanced Structured Materials', u'@language': u'En'}, u'publisherlocation': u'Cham', u'occurrence': {u'handle': u'10.1007/978-3-030-11665-1_27', u'@type': u'DOI'}, u'booktitle': u'Dynamical Processes in Generalized Continua and Structures', u'numberinseries': u'103', u'publishername': u'Springer'}, u'bibauthorname': [{u'familyname': u'Teymur', u'initials': u'M'}, {u'familyname': u'Var', u'initials': u'H&Idot'}, {u'familyname': u'Deliktas', u'initials': u'E'}], u'chaptertitle': {u'#text': u'Nonlinear modulation of surface SH waves in a double layered elastic half space', u'@language': u'En'}, u'bibeditorname': [{u'familyname': u'Altenbach', u'initials': u'H'}, {u'familyname': u'Belyaev', u'initials': u'A'}, {u'familyname': u'Eremeyev', u'initials': u'V'}, {u'familyname': u'Krivtsov', u'initials': u'A'}, {u'familyname': u'Porubov', u'initials': u'A'}], u'year': u'2019'}, u'citationnumber': u'35.', u'@id': u'CR35'}")],
[('AUTHOR_FIRST_NAME', u'GB'), ('AUTHOR_LAST_NAME', u'Whitham'), ('YEAR', u'1974'), ('PUBLISHER', u'Linear'), ('PUBLISHER', u'and'), ('PUBLISHER', u'Nonlinear'), ('PUBLISHER', u'Waves'), ('REFPLAINTEXT', u'Whitham, G.B.: Linear and Nonlinear Waves. Wiley, New York (1974)'), ('REFSTR', "{u'bibunstructured': u'Whitham, G.B.: Linear and Nonlinear Waves. Wiley, New York (1974)', u'citationnumber': u'36.', u'@id': u'CR36', u'bibbook': {u'bibauthorname': {u'familyname': u'Whitham', u'initials': u'GB'}, u'publisherlocation': u'New York', u'occurrence': {u'handle': u'0373.76001', u'@type': u'ZLBID'}, u'booktitle': u'Linear and Nonlinear Waves', u'year': u'1974', u'publishername': u'Wiley'}}")],
[('AUTHOR_FIRST_NAME', u'VE'), ('AUTHOR_LAST_NAME', u'Zakharov'), ('AUTHOR_FIRST_NAME', u'AB'), ('AUTHOR_LAST_NAME', u'Shabat'), ('TITLE', u'Interaction'), ('TITLE', u'between'), ('TITLE', u'solitons'), ('TITLE', u'in'), ('TITLE', u'a'), ('TITLE', u'stable'), ('TITLE', u'medium'), ('JOURNAL', u'Sov.'), ('JOURNAL', u'Phys.'), ('JOURNAL', u'JETP'), ('VOLUME', u'37'), ('YEAR', u'1973'), ('PAGE', u'823'), ('REFPLAINTEXT', u'Zakharov, V.E., Shabat, A.B.: Interaction between solitons in a stable medium. Sov. Phys. JETP 37, 823 (1973)'), ('REFSTR', "{u'bibunstructured': u'Zakharov, V.E., Shabat, A.B.: Interaction between solitons in a stable medium. Sov. Phys. JETP 37, 823 (1973)', u'bibarticle': {u'bibauthorname': [{u'familyname': u'Zakharov', u'initials': u'VE'}, {u'familyname': u'Shabat', u'initials': u'AB'}], u'journaltitle': u'Sov. Phys. JETP', u'volumeid': u'37', u'firstpage': u'823', u'year': u'1973', u'articletitle': {u'#text': u'Interaction between solitons in a stable medium', u'@language': u'En'}}, u'citationnumber': u'37.', u'@id': u'CR37'}")]]
| 1,021.83105
| 2,283
| 0.628219
| 67,330
| 447,562
| 4.130061
| 0.055993
| 0.0665
| 0.037206
| 0.039863
| 0.876689
| 0.77726
| 0.754546
| 0.705894
| 0.631674
| 0.563722
| 0
| 0.067968
| 0.10559
| 447,562
| 438
| 2,284
| 1,021.83105
| 0.626696
| 0
| 0
| 0
| 0
| 1.37931
| 0.783425
| 0.03221
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.016092
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
77d7706d273317b8f1ce17a1000ae9b8d0533e23
| 121
|
py
|
Python
|
stable_baselines3/sac_latent/__init__.py
|
NicholasCorrado/stable-baselines3
|
77793947335c6b14747c2c5179a7c05c93289ffd
|
[
"MIT"
] | null | null | null |
stable_baselines3/sac_latent/__init__.py
|
NicholasCorrado/stable-baselines3
|
77793947335c6b14747c2c5179a7c05c93289ffd
|
[
"MIT"
] | null | null | null |
stable_baselines3/sac_latent/__init__.py
|
NicholasCorrado/stable-baselines3
|
77793947335c6b14747c2c5179a7c05c93289ffd
|
[
"MIT"
] | null | null | null |
from stable_baselines3.sac_latent.policies import MlpPolicy
from stable_baselines3.sac_latent.sac_latent import SACLatent
| 60.5
| 61
| 0.909091
| 17
| 121
| 6.176471
| 0.529412
| 0.257143
| 0.380952
| 0.438095
| 0.552381
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.057851
| 121
| 2
| 61
| 60.5
| 0.903509
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
77da424a16d7a0be85babfb3ee9513c48452950e
| 125,756
|
py
|
Python
|
tccli/services/cpdp/cpdp_client.py
|
ws0416/tencentcloud-cli
|
0a90fa77c8be1efa30b196a3eeb31b8be1f6a325
|
[
"Apache-2.0"
] | null | null | null |
tccli/services/cpdp/cpdp_client.py
|
ws0416/tencentcloud-cli
|
0a90fa77c8be1efa30b196a3eeb31b8be1f6a325
|
[
"Apache-2.0"
] | null | null | null |
tccli/services/cpdp/cpdp_client.py
|
ws0416/tencentcloud-cli
|
0a90fa77c8be1efa30b196a3eeb31b8be1f6a325
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.cpdp.v20190820 import cpdp_client as cpdp_client_v20190820
from tencentcloud.cpdp.v20190820 import models as models_v20190820
def doModifyAgentTaxPaymentInfo(args, parsed_globals):
    """Invoke the Cpdp ``ModifyAgentTaxPaymentInfo`` API and print the JSON reply.

    ``args`` holds the action parameters parsed from the CLI; ``parsed_globals``
    carries global options (credentials, endpoint, output formatting, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )

    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].ModifyAgentTaxPaymentInfoRequest()
    model.from_json_string(json.dumps(args))
    result = client.ModifyAgentTaxPaymentInfo(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyMntMbrBindRelateAcctBankCode(args, parsed_globals):
    """Invoke the Cpdp ``ModifyMntMbrBindRelateAcctBankCode`` API and print the JSON reply.

    ``args`` holds the action parameters parsed from the CLI; ``parsed_globals``
    carries global options (credentials, endpoint, output formatting, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )

    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].ModifyMntMbrBindRelateAcctBankCodeRequest()
    model.from_json_string(json.dumps(args))
    result = client.ModifyMntMbrBindRelateAcctBankCode(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryOrder(args, parsed_globals):
    """Invoke the Cpdp ``QueryOrder`` API and print the JSON reply.

    ``args`` holds the action parameters parsed from the CLI; ``parsed_globals``
    carries global options (credentials, endpoint, output formatting, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )

    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].QueryOrderRequest()
    model.from_json_string(json.dumps(args))
    result = client.QueryOrder(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doWithdrawCashMembership(args, parsed_globals):
    """Invoke the Cpdp ``WithdrawCashMembership`` API and print the JSON reply.

    ``args`` holds the action parameters parsed from the CLI; ``parsed_globals``
    carries global options (credentials, endpoint, output formatting, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )

    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].WithdrawCashMembershipRequest()
    model.from_json_string(json.dumps(args))
    result = client.WithdrawCashMembership(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryBankTransactionDetails(args, parsed_globals):
    """Invoke the Cpdp ``QueryBankTransactionDetails`` API and print the JSON reply.

    ``args`` holds the action parameters parsed from the CLI; ``parsed_globals``
    carries global options (credentials, endpoint, output formatting, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )

    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].QueryBankTransactionDetailsRequest()
    model.from_json_string(json.dumps(args))
    result = client.QueryBankTransactionDetails(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryCommonTransferRecharge(args, parsed_globals):
    """Invoke the Cpdp ``QueryCommonTransferRecharge`` API and print the JSON reply.

    ``args`` holds the action parameters parsed from the CLI; ``parsed_globals``
    carries global options (credentials, endpoint, output formatting, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )

    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].QueryCommonTransferRechargeRequest()
    model.from_json_string(json.dumps(args))
    result = client.QueryCommonTransferRecharge(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDownloadBill(args, parsed_globals):
    """Invoke the Cpdp ``DownloadBill`` API and print the JSON reply.

    ``args`` holds the action parameters parsed from the CLI; ``parsed_globals``
    carries global options (credentials, endpoint, output formatting, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )

    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].DownloadBillRequest()
    model.from_json_string(json.dumps(args))
    result = client.DownloadBill(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryCustAcctIdBalance(args, parsed_globals):
    """Invoke the Cpdp ``QueryCustAcctIdBalance`` API and print the JSON reply.

    ``args`` holds the action parameters parsed from the CLI; ``parsed_globals``
    carries global options (credentials, endpoint, output formatting, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )

    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].QueryCustAcctIdBalanceRequest()
    model.from_json_string(json.dumps(args))
    result = client.QueryCustAcctIdBalance(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryPayerInfo(args, parsed_globals):
    """Invoke the Cpdp ``QueryPayerInfo`` API and print the JSON reply.

    ``args`` holds the action parameters parsed from the CLI; ``parsed_globals``
    carries global options (credentials, endpoint, output formatting, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )

    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].QueryPayerInfoRequest()
    model.from_json_string(json.dumps(args))
    result = client.QueryPayerInfo(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryTrade(args, parsed_globals):
    """Invoke the Cpdp ``QueryTrade`` API and print the JSON reply.

    ``args`` holds the action parameters parsed from the CLI; ``parsed_globals``
    carries global options (credentials, endpoint, output formatting, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )

    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].QueryTradeRequest()
    model.from_json_string(json.dumps(args))
    result = client.QueryTrade(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryTransferDetail(args, parsed_globals):
    """Invoke the Cpdp ``QueryTransferDetail`` API and print the JSON reply.

    ``args`` holds the action parameters parsed from the CLI; ``parsed_globals``
    carries global options (credentials, endpoint, output formatting, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )

    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].QueryTransferDetailRequest()
    model.from_json_string(json.dumps(args))
    result = client.QueryTransferDetail(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQuerySingleTransactionStatus(args, parsed_globals):
    """Invoke the Cpdp ``QuerySingleTransactionStatus`` API and print the JSON reply.

    ``args`` holds the action parameters parsed from the CLI; ``parsed_globals``
    carries global options (credentials, endpoint, output formatting, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )

    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].QuerySingleTransactionStatusRequest()
    model.from_json_string(json.dumps(args))
    result = client.QuerySingleTransactionStatus(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doApplyApplicationMaterial(args, parsed_globals):
    """Invoke the Cpdp ``ApplyApplicationMaterial`` API and print the JSON reply.

    ``args`` holds the action parameters parsed from the CLI; ``parsed_globals``
    carries global options (credentials, endpoint, output formatting, ...).
    """
    g_param = parse_global_arg(parsed_globals)

    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )

    # Request timeout defaults to 60s unless overridden on the command line.
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)

    model = MODELS_MAP[version].ApplyApplicationMaterialRequest()
    model.from_json_string(json.dumps(args))
    result = client.ApplyApplicationMaterial(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # python3.3
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyMerchant(args, parsed_globals):
    """Invoke the Cpdp ModifyMerchant action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].ModifyMerchantRequest()
    request.from_json_string(json.dumps(args))
    result = client.ModifyMerchant(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateInvoiceV2(args, parsed_globals):
    """Invoke the Cpdp CreateInvoiceV2 action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].CreateInvoiceV2Request()
    request.from_json_string(json.dumps(args))
    result = client.CreateInvoiceV2(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryInvoice(args, parsed_globals):
    """Invoke the Cpdp QueryInvoice action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].QueryInvoiceRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryInvoice(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryExchangeRate(args, parsed_globals):
    """Invoke the Cpdp QueryExchangeRate action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].QueryExchangeRateRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryExchangeRate(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUnbindRelateAcct(args, parsed_globals):
    """Invoke the Cpdp UnbindRelateAcct action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].UnbindRelateAcctRequest()
    request.from_json_string(json.dumps(args))
    result = client.UnbindRelateAcct(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doContractOrder(args, parsed_globals):
    """Invoke the Cpdp ContractOrder action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].ContractOrderRequest()
    request.from_json_string(json.dumps(args))
    result = client.ContractOrder(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTransferBatch(args, parsed_globals):
    """Invoke the Cpdp CreateTransferBatch action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].CreateTransferBatchRequest()
    request.from_json_string(json.dumps(args))
    result = client.CreateTransferBatch(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUploadTaxPayment(args, parsed_globals):
    """Invoke the Cpdp UploadTaxPayment action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].UploadTaxPaymentRequest()
    request.from_json_string(json.dumps(args))
    result = client.UploadTaxPayment(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryMemberBind(args, parsed_globals):
    """Invoke the Cpdp QueryMemberBind action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].QueryMemberBindRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryMemberBind(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeChargeDetail(args, parsed_globals):
    """Invoke the Cpdp DescribeChargeDetail action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].DescribeChargeDetailRequest()
    request.from_json_string(json.dumps(args))
    result = client.DescribeChargeDetail(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryMerchantBalance(args, parsed_globals):
    """Invoke the Cpdp QueryMerchantBalance action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].QueryMerchantBalanceRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryMerchantBalance(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteAgentTaxPaymentInfo(args, parsed_globals):
    """Invoke the Cpdp DeleteAgentTaxPaymentInfo action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].DeleteAgentTaxPaymentInfoRequest()
    request.from_json_string(json.dumps(args))
    result = client.DeleteAgentTaxPaymentInfo(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateAgentTaxPaymentInfos(args, parsed_globals):
    """Invoke the Cpdp CreateAgentTaxPaymentInfos action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].CreateAgentTaxPaymentInfosRequest()
    request.from_json_string(json.dumps(args))
    result = client.CreateAgentTaxPaymentInfos(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteAgentTaxPaymentInfos(args, parsed_globals):
    """Invoke the Cpdp DeleteAgentTaxPaymentInfos action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].DeleteAgentTaxPaymentInfosRequest()
    request.from_json_string(json.dumps(args))
    result = client.DeleteAgentTaxPaymentInfos(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreatePayMerchant(args, parsed_globals):
    """Invoke the Cpdp CreatePayMerchant action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].CreatePayMerchantRequest()
    request.from_json_string(json.dumps(args))
    result = client.CreatePayMerchant(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRegisterBehavior(args, parsed_globals):
    """Invoke the Cpdp RegisterBehavior action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].RegisterBehaviorRequest()
    request.from_json_string(json.dumps(args))
    result = client.RegisterBehavior(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRegisterBillSupportWithdraw(args, parsed_globals):
    """Invoke the Cpdp RegisterBillSupportWithdraw action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].RegisterBillSupportWithdrawRequest()
    request.from_json_string(json.dumps(args))
    result = client.RegisterBillSupportWithdraw(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUnifiedOrder(args, parsed_globals):
    """Invoke the Cpdp UnifiedOrder action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].UnifiedOrderRequest()
    request.from_json_string(json.dumps(args))
    result = client.UnifiedOrder(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUnBindAcct(args, parsed_globals):
    """Invoke the Cpdp UnBindAcct action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].UnBindAcctRequest()
    request.from_json_string(json.dumps(args))
    result = client.UnBindAcct(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doConfirmOrder(args, parsed_globals):
    """Invoke the Cpdp ConfirmOrder action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].ConfirmOrderRequest()
    request.from_json_string(json.dumps(args))
    result = client.ConfirmOrder(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCheckAmount(args, parsed_globals):
    """Invoke the Cpdp CheckAmount action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].CheckAmountRequest()
    request.from_json_string(json.dumps(args))
    result = client.CheckAmount(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryTransferResult(args, parsed_globals):
    """Invoke the Cpdp QueryTransferResult action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].QueryTransferResultRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryTransferResult(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryOutwardOrder(args, parsed_globals):
    """Invoke the Cpdp QueryOutwardOrder action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].QueryOutwardOrderRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryOutwardOrder(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doMigrateOrderRefund(args, parsed_globals):
    """Invoke the Cpdp MigrateOrderRefund action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].MigrateOrderRefundRequest()
    request.from_json_string(json.dumps(args))
    result = client.MigrateOrderRefund(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryAcctInfo(args, parsed_globals):
    """Invoke the Cpdp QueryAcctInfo action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].QueryAcctInfoRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryAcctInfo(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryBankWithdrawCashDetails(args, parsed_globals):
    """Invoke the Cpdp QueryBankWithdrawCashDetails action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].QueryBankWithdrawCashDetailsRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryBankWithdrawCashDetails(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCheckAcct(args, parsed_globals):
    """Invoke the Cpdp CheckAcct action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].CheckAcctRequest()
    request.from_json_string(json.dumps(args))
    result = client.CheckAcct(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryMerchantOrder(args, parsed_globals):
    """Invoke the Cpdp QueryMerchantOrder action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].QueryMerchantOrderRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryMerchantOrder(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateRedInvoiceV2(args, parsed_globals):
    """Invoke the Cpdp CreateRedInvoiceV2 action and print the response.

    args: dict of request parameters, serialized into the SDK request model.
    parsed_globals: parsed global CLI options (credentials, region, endpoint,
    output format, filter, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].CpdpClient(
        cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += "_CLI_" + __version__
    request = MODELS_MAP[g_param[OptionsDefine.Version]].CreateRedInvoiceV2Request()
    request.from_json_string(json.dumps(args))
    result = client.CreateRedInvoiceV2(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # On python 3.3 the SDK may hand back bytes instead of str.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSyncContractData(args, parsed_globals):
    """Invoke the SyncContractData API action and print the JSON response.

    :param args: dict of action parameters, serialized into a SyncContractDataRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.SyncContractDataRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.SyncContractData(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryAnchorContractInfo(args, parsed_globals):
    """Invoke the QueryAnchorContractInfo API action and print the JSON response.

    :param args: dict of action parameters, serialized into a QueryAnchorContractInfoRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.QueryAnchorContractInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.QueryAnchorContractInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryInvoiceV2(args, parsed_globals):
    """Invoke the QueryInvoiceV2 API action and print the JSON response.

    :param args: dict of action parameters, serialized into a QueryInvoiceV2Request.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.QueryInvoiceV2Request()
    model.from_json_string(json.dumps(args))
    rsp = client.QueryInvoiceV2(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBindAcct(args, parsed_globals):
    """Invoke the BindAcct API action and print the JSON response.

    :param args: dict of action parameters, serialized into a BindAcctRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BindAcctRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.BindAcct(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBindRelateAcctSmallAmount(args, parsed_globals):
    """Invoke the BindRelateAcctSmallAmount API action and print the JSON response.

    :param args: dict of action parameters, serialized into a BindRelateAcctSmallAmountRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BindRelateAcctSmallAmountRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.BindRelateAcctSmallAmount(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateOrder(args, parsed_globals):
    """Invoke the CreateOrder API action and print the JSON response.

    :param args: dict of action parameters, serialized into a CreateOrderRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateOrderRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateOrder(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doTransferSinglePay(args, parsed_globals):
    """Invoke the TransferSinglePay API action and print the JSON response.

    :param args: dict of action parameters, serialized into a TransferSinglePayRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.TransferSinglePayRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.TransferSinglePay(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryMerchant(args, parsed_globals):
    """Invoke the QueryMerchant API action and print the JSON response.

    :param args: dict of action parameters, serialized into a QueryMerchantRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.QueryMerchantRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.QueryMerchant(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doReviseMbrProperty(args, parsed_globals):
    """Invoke the ReviseMbrProperty API action and print the JSON response.

    :param args: dict of action parameters, serialized into a ReviseMbrPropertyRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ReviseMbrPropertyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ReviseMbrProperty(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryBalance(args, parsed_globals):
    """Invoke the QueryBalance API action and print the JSON response.

    :param args: dict of action parameters, serialized into a QueryBalanceRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.QueryBalanceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.QueryBalance(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRevokeRechargeByThirdPay(args, parsed_globals):
    """Invoke the RevokeRechargeByThirdPay API action and print the JSON response.

    :param args: dict of action parameters, serialized into a RevokeRechargeByThirdPayRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.RevokeRechargeByThirdPayRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.RevokeRechargeByThirdPay(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doApplyReWithdrawal(args, parsed_globals):
    """Invoke the ApplyReWithdrawal API action and print the JSON response.

    :param args: dict of action parameters, serialized into an ApplyReWithdrawalRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ApplyReWithdrawalRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ApplyReWithdrawal(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryAgentTaxPaymentBatch(args, parsed_globals):
    """Invoke the QueryAgentTaxPaymentBatch API action and print the JSON response.

    :param args: dict of action parameters, serialized into a QueryAgentTaxPaymentBatchRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.QueryAgentTaxPaymentBatchRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.QueryAgentTaxPaymentBatch(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeOrderStatus(args, parsed_globals):
    """Invoke the DescribeOrderStatus API action and print the JSON response.

    :param args: dict of action parameters, serialized into a DescribeOrderStatusRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeOrderStatusRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeOrderStatus(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryAcctBinding(args, parsed_globals):
    """Invoke the QueryAcctBinding API action and print the JSON response.

    :param args: dict of action parameters, serialized into a QueryAcctBindingRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.QueryAcctBindingRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.QueryAcctBinding(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUploadTaxList(args, parsed_globals):
    """Invoke the UploadTaxList API action and print the JSON response.

    :param args: dict of action parameters, serialized into an UploadTaxListRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.UploadTaxListRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.UploadTaxList(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doApplyPayerInfo(args, parsed_globals):
    """Invoke the ApplyPayerInfo API action and print the JSON response.

    :param args: dict of action parameters, serialized into an ApplyPayerInfoRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ApplyPayerInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ApplyPayerInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doApplyTrade(args, parsed_globals):
    """Invoke the ApplyTrade API action and print the JSON response.

    :param args: dict of action parameters, serialized into an ApplyTradeRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ApplyTradeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ApplyTrade(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryBankClear(args, parsed_globals):
    """Invoke the QueryBankClear API action and print the JSON response.

    :param args: dict of action parameters, serialized into a QueryBankClearRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.QueryBankClearRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.QueryBankClear(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBindRelateAccReUnionPay(args, parsed_globals):
    """Invoke the BindRelateAccReUnionPay API action and print the JSON response.

    :param args: dict of action parameters, serialized into a BindRelateAccReUnionPayRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BindRelateAccReUnionPayRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.BindRelateAccReUnionPay(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doApplyWithdrawal(args, parsed_globals):
    """Invoke the ApplyWithdrawal API action and print the JSON response.

    :param args: dict of action parameters, serialized into an ApplyWithdrawalRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ApplyWithdrawalRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ApplyWithdrawal(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRevResigterBillSupportWithdraw(args, parsed_globals):
    """Invoke the RevResigterBillSupportWithdraw API action and print the JSON response.

    (The action name's "Resigter" spelling matches the upstream API definition.)

    :param args: dict of action parameters, serialized into a RevResigterBillSupportWithdrawRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.RevResigterBillSupportWithdrawRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.RevResigterBillSupportWithdraw(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQueryApplicationMaterial(args, parsed_globals):
    """Invoke the QueryApplicationMaterial API action and print the JSON response.

    :param args: dict of action parameters, serialized into a QueryApplicationMaterialRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.QueryApplicationMaterialRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.QueryApplicationMaterial(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRefund(args, parsed_globals):
    """Invoke the Refund API action and print the JSON response.

    :param args: dict of action parameters, serialized into a RefundRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.RefundRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.Refund(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRechargeMemberThirdPay(args, parsed_globals):
    """Invoke the RechargeMemberThirdPay API action and print the JSON response.

    :param args: dict of action parameters, serialized into a RechargeMemberThirdPayRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.RechargeMemberThirdPayRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.RechargeMemberThirdPay(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateMerchant(args, parsed_globals):
    """Invoke the CreateMerchant API action and print the JSON response.

    :param args: dict of action parameters, serialized into a CreateMerchantRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateMerchantRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateMerchant(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateAcct(args, parsed_globals):
    """Invoke the CreateAcct API action and print the JSON response.

    :param args: dict of action parameters, serialized into a CreateAcctRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateAcctRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateAcct(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doExecuteMemberTransaction(args, parsed_globals):
    """Invoke the ExecuteMemberTransaction API action and print the JSON response.

    :param args: dict of action parameters, serialized into an ExecuteMemberTransactionRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ExecuteMemberTransactionRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ExecuteMemberTransaction(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRegisterBill(args, parsed_globals):
    """Invoke the RegisterBill API action and print the JSON response.

    :param args: dict of action parameters, serialized into a RegisterBillRequest.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless --timeout was supplied.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
        proxy=g_param[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CpdpClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag outgoing requests so the service can tell CLI traffic from SDK traffic.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.RegisterBillRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.RegisterBill(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on Python < 3.6
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doQuerySinglePay(args, parsed_globals):
    """Invoke the cpdp QuerySinglePay action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].QuerySinglePayRequest()
    request.from_json_string(json.dumps(args))
    result = client.QuerySinglePay(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doCreateRedInvoice(args, parsed_globals):
    """Invoke the cpdp CreateRedInvoice action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].CreateRedInvoiceRequest()
    request.from_json_string(json.dumps(args))
    result = client.CreateRedInvoice(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doRechargeByThirdPay(args, parsed_globals):
    """Invoke the cpdp RechargeByThirdPay action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].RechargeByThirdPayRequest()
    request.from_json_string(json.dumps(args))
    result = client.RechargeByThirdPay(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doCreateSinglePay(args, parsed_globals):
    """Invoke the cpdp CreateSinglePay action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].CreateSinglePayRequest()
    request.from_json_string(json.dumps(args))
    result = client.CreateSinglePay(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doCreateCustAcctId(args, parsed_globals):
    """Invoke the cpdp CreateCustAcctId action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].CreateCustAcctIdRequest()
    request.from_json_string(json.dumps(args))
    result = client.CreateCustAcctId(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doCloseOrder(args, parsed_globals):
    """Invoke the cpdp CloseOrder action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].CloseOrderRequest()
    request.from_json_string(json.dumps(args))
    result = client.CloseOrder(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doQueryMerchantInfoForManagement(args, parsed_globals):
    """Invoke the cpdp QueryMerchantInfoForManagement action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].QueryMerchantInfoForManagementRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryMerchantInfoForManagement(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doRevokeMemberRechargeThirdPay(args, parsed_globals):
    """Invoke the cpdp RevokeMemberRechargeThirdPay action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].RevokeMemberRechargeThirdPayRequest()
    request.from_json_string(json.dumps(args))
    result = client.RevokeMemberRechargeThirdPay(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doBindRelateAcctUnionPay(args, parsed_globals):
    """Invoke the cpdp BindRelateAcctUnionPay action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].BindRelateAcctUnionPayRequest()
    request.from_json_string(json.dumps(args))
    result = client.BindRelateAcctUnionPay(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doQueryMemberTransaction(args, parsed_globals):
    """Invoke the cpdp QueryMemberTransaction action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].QueryMemberTransactionRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryMemberTransaction(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doQueryBillDownloadURL(args, parsed_globals):
    """Invoke the cpdp QueryBillDownloadURL action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].QueryBillDownloadURLRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryBillDownloadURL(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doCreateInvoice(args, parsed_globals):
    """Invoke the cpdp CreateInvoice action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].CreateInvoiceRequest()
    request.from_json_string(json.dumps(args))
    result = client.CreateInvoice(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doQueryRefund(args, parsed_globals):
    """Invoke the cpdp QueryRefund action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].QueryRefundRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryRefund(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doRefundMemberTransaction(args, parsed_globals):
    """Invoke the cpdp RefundMemberTransaction action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].RefundMemberTransactionRequest()
    request.from_json_string(json.dumps(args))
    result = client.RefundMemberTransaction(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doMigrateOrderRefundQuery(args, parsed_globals):
    """Invoke the cpdp MigrateOrderRefundQuery action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].MigrateOrderRefundQueryRequest()
    request.from_json_string(json.dumps(args))
    result = client.MigrateOrderRefundQuery(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doQueryAgentStatements(args, parsed_globals):
    """Invoke the cpdp QueryAgentStatements action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].QueryAgentStatementsRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryAgentStatements(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doQueryContract(args, parsed_globals):
    """Invoke the cpdp QueryContract action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].QueryContractRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryContract(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doApplyOutwardOrder(args, parsed_globals):
    """Invoke the cpdp ApplyOutwardOrder action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].ApplyOutwardOrderRequest()
    request.from_json_string(json.dumps(args))
    result = client.ApplyOutwardOrder(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doQuerySmallAmountTransfer(args, parsed_globals):
    """Invoke the cpdp QuerySmallAmountTransfer action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].QuerySmallAmountTransferRequest()
    request.from_json_string(json.dumps(args))
    result = client.QuerySmallAmountTransfer(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doQueryTransferBatch(args, parsed_globals):
    """Invoke the cpdp QueryTransferBatch action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].QueryTransferBatchRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryTransferBatch(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doTerminateContract(args, parsed_globals):
    """Invoke the cpdp TerminateContract action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].TerminateContractRequest()
    request.from_json_string(json.dumps(args))
    result = client.TerminateContract(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doQueryAcctInfoList(args, parsed_globals):
    """Invoke the cpdp QueryAcctInfoList action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].QueryAcctInfoListRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryAcctInfoList(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doQueryReconciliationDocument(args, parsed_globals):
    """Invoke the cpdp QueryReconciliationDocument action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].QueryReconciliationDocumentRequest()
    request.from_json_string(json.dumps(args))
    result = client.QueryReconciliationDocument(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
def doRefundOrder(args, parsed_globals):
    """Invoke the cpdp RefundOrder action and print the formatted response."""
    opts = parse_global_arg(parsed_globals)
    # Build credentials and HTTP settings from the resolved global options.
    cred = credential.Credential(
        opts[OptionsDefine.SecretId], opts[OptionsDefine.SecretKey], opts[OptionsDefine.Token]
    )
    timeout = opts[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=int(timeout) if timeout is not None else 60,
        reqMethod="POST",
        endpoint=opts[OptionsDefine.Endpoint],
        proxy=opts[OptionsDefine.HttpsProxy]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = opts[OptionsDefine.Version]
    client = CLIENT_MAP[version].CpdpClient(cred, opts[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].RefundOrderRequest()
    request.from_json_string(json.dumps(args))
    result = client.RefundOrder(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode before parsing.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, opts[OptionsDefine.Output], opts[OptionsDefine.Filter])
# API version tag -> generated client module for the cpdp service.
CLIENT_MAP = {
    "v20190820": cpdp_client_v20190820,
}
# API version tag -> generated request/response model module.
MODELS_MAP = {
    "v20190820": models_v20190820,
}
# CLI action name -> handler function. Used by action_caller() to
# dispatch a parsed command line to the matching do* handler.
ACTION_MAP = {
    "ModifyAgentTaxPaymentInfo": doModifyAgentTaxPaymentInfo,
    "ModifyMntMbrBindRelateAcctBankCode": doModifyMntMbrBindRelateAcctBankCode,
    "QueryOrder": doQueryOrder,
    "WithdrawCashMembership": doWithdrawCashMembership,
    "QueryBankTransactionDetails": doQueryBankTransactionDetails,
    "QueryCommonTransferRecharge": doQueryCommonTransferRecharge,
    "DownloadBill": doDownloadBill,
    "QueryCustAcctIdBalance": doQueryCustAcctIdBalance,
    "QueryPayerInfo": doQueryPayerInfo,
    "QueryTrade": doQueryTrade,
    "QueryTransferDetail": doQueryTransferDetail,
    "QuerySingleTransactionStatus": doQuerySingleTransactionStatus,
    "ApplyApplicationMaterial": doApplyApplicationMaterial,
    "ModifyMerchant": doModifyMerchant,
    "CreateInvoiceV2": doCreateInvoiceV2,
    "QueryInvoice": doQueryInvoice,
    "QueryExchangeRate": doQueryExchangeRate,
    "UnbindRelateAcct": doUnbindRelateAcct,
    "ContractOrder": doContractOrder,
    "CreateTransferBatch": doCreateTransferBatch,
    "UploadTaxPayment": doUploadTaxPayment,
    "QueryMemberBind": doQueryMemberBind,
    "DescribeChargeDetail": doDescribeChargeDetail,
    "QueryMerchantBalance": doQueryMerchantBalance,
    "DeleteAgentTaxPaymentInfo": doDeleteAgentTaxPaymentInfo,
    "CreateAgentTaxPaymentInfos": doCreateAgentTaxPaymentInfos,
    "DeleteAgentTaxPaymentInfos": doDeleteAgentTaxPaymentInfos,
    "CreatePayMerchant": doCreatePayMerchant,
    "RegisterBehavior": doRegisterBehavior,
    "RegisterBillSupportWithdraw": doRegisterBillSupportWithdraw,
    "UnifiedOrder": doUnifiedOrder,
    "UnBindAcct": doUnBindAcct,
    "ConfirmOrder": doConfirmOrder,
    "CheckAmount": doCheckAmount,
    "QueryTransferResult": doQueryTransferResult,
    "QueryOutwardOrder": doQueryOutwardOrder,
    "MigrateOrderRefund": doMigrateOrderRefund,
    "QueryAcctInfo": doQueryAcctInfo,
    "QueryBankWithdrawCashDetails": doQueryBankWithdrawCashDetails,
    "CheckAcct": doCheckAcct,
    "QueryMerchantOrder": doQueryMerchantOrder,
    "CreateRedInvoiceV2": doCreateRedInvoiceV2,
    "SyncContractData": doSyncContractData,
    "QueryAnchorContractInfo": doQueryAnchorContractInfo,
    "QueryInvoiceV2": doQueryInvoiceV2,
    "BindAcct": doBindAcct,
    "BindRelateAcctSmallAmount": doBindRelateAcctSmallAmount,
    "CreateOrder": doCreateOrder,
    "TransferSinglePay": doTransferSinglePay,
    "QueryMerchant": doQueryMerchant,
    "ReviseMbrProperty": doReviseMbrProperty,
    "QueryBalance": doQueryBalance,
    "RevokeRechargeByThirdPay": doRevokeRechargeByThirdPay,
    "ApplyReWithdrawal": doApplyReWithdrawal,
    "QueryAgentTaxPaymentBatch": doQueryAgentTaxPaymentBatch,
    "DescribeOrderStatus": doDescribeOrderStatus,
    "QueryAcctBinding": doQueryAcctBinding,
    "UploadTaxList": doUploadTaxList,
    "ApplyPayerInfo": doApplyPayerInfo,
    "ApplyTrade": doApplyTrade,
    "QueryBankClear": doQueryBankClear,
    "BindRelateAccReUnionPay": doBindRelateAccReUnionPay,
    "ApplyWithdrawal": doApplyWithdrawal,
    "RevResigterBillSupportWithdraw": doRevResigterBillSupportWithdraw,
    "QueryApplicationMaterial": doQueryApplicationMaterial,
    "Refund": doRefund,
    "RechargeMemberThirdPay": doRechargeMemberThirdPay,
    "CreateMerchant": doCreateMerchant,
    "CreateAcct": doCreateAcct,
    "ExecuteMemberTransaction": doExecuteMemberTransaction,
    "RegisterBill": doRegisterBill,
    "QuerySinglePay": doQuerySinglePay,
    "CreateRedInvoice": doCreateRedInvoice,
    "RechargeByThirdPay": doRechargeByThirdPay,
    "CreateSinglePay": doCreateSinglePay,
    "CreateCustAcctId": doCreateCustAcctId,
    "CloseOrder": doCloseOrder,
    "QueryMerchantInfoForManagement": doQueryMerchantInfoForManagement,
    "RevokeMemberRechargeThirdPay": doRevokeMemberRechargeThirdPay,
    "BindRelateAcctUnionPay": doBindRelateAcctUnionPay,
    "QueryMemberTransaction": doQueryMemberTransaction,
    "QueryBillDownloadURL": doQueryBillDownloadURL,
    "CreateInvoice": doCreateInvoice,
    "QueryRefund": doQueryRefund,
    "RefundMemberTransaction": doRefundMemberTransaction,
    "MigrateOrderRefundQuery": doMigrateOrderRefundQuery,
    "QueryAgentStatements": doQueryAgentStatements,
    "QueryContract": doQueryContract,
    "ApplyOutwardOrder": doApplyOutwardOrder,
    "QuerySmallAmountTransfer": doQuerySmallAmountTransfer,
    "QueryTransferBatch": doQueryTransferBatch,
    "TerminateContract": doTerminateContract,
    "QueryAcctInfoList": doQueryAcctInfoList,
    "QueryReconciliationDocument": doQueryReconciliationDocument,
    "RefundOrder": doRefundOrder,
}
# API versions this module supports; must match the keys of CLIENT_MAP/MODELS_MAP.
AVAILABLE_VERSION_LIST = [
    "v20190820",
]
def action_caller():
    """Return the CLI dispatch table mapping action names to handler functions."""
    return ACTION_MAP
def parse_global_arg(parsed_globals):
    """Resolve the effective global CLI parameters for this service.

    Missing credential/region/output values are back-filled from the
    profile's ``<profile>.configure`` / ``<profile>.credential`` files under
    ``~/.tccli``; when no profile was named on the command line, the
    TENCENTCLOUD_* environment variables take precedence over the files.
    The service version and endpoint fall back to the ``cpdp`` section of
    the configure file.

    :param parsed_globals: dict of globals parsed from the command line;
        mutated in place and also returned.
    :return: the same dict, fully populated.
    :raises ConfigurationError: if a profile file is not valid JSON, a
        required value cannot be resolved, the config file lacks the needed
        ``cpdp`` entries, or the resolved version is unsupported.
    """
    g_param = parsed_globals
    is_exist_profile = True
    if not parsed_globals["profile"]:
        is_exist_profile = False
        g_param["profile"] = "default"
    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
    conf = {}
    cred = {}
    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)
    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None
    # Environment variables only apply when the user did not name a profile
    # explicitly; an explicit profile always wins.
    if not is_exist_profile:
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
    # Back-fill any globals still unset: credentials from the credential
    # file, region/output from the configure file.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
    try:
        # "2019-08-20" style versions are normalized to "v20190820".
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["cpdp"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["cpdp"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
    # Consistency fix: raise ConfigurationError like every other failure path
    # here (was a bare Exception); callers catching Exception still catch it.
    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise ConfigurationError("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
    return g_param
| 43.939902
| 105
| 0.728625
| 14,204
| 125,756
| 6.210645
| 0.028865
| 0.085223
| 0.247041
| 0.056883
| 0.871713
| 0.869684
| 0.868867
| 0.868051
| 0.866986
| 0.820565
| 0
| 0.008061
| 0.163459
| 125,756
| 2,861
| 106
| 43.95526
| 0.830494
| 0.007721
| 0
| 0.748038
| 0
| 0
| 0.038488
| 0.006095
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038069
| false
| 0
| 0.00471
| 0.000392
| 0.043564
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.